Upload Project TFM
commit 4e4ad51596

109 create_models.py Normal file
@@ -0,0 +1,109 @@
## Import/export of trained models
import joblib
## Algorithms
from sklearn.svm import SVC
from sklearn.tree import DecisionTreeClassifier
from sklearn.neural_network import MLPClassifier
## Metrics
from sklearn.metrics import accuracy_score, confusion_matrix
## Preprocessing
from sklearn.preprocessing import StandardScaler
## Data
import pandas as pd
from tabulate import tabulate


def save_model(model, name):
    joblib.dump(model, name)


def data_load():
    head_names = ["src_ip", "received_bytes", "send_bytes", "port_duration", "total_duration",
                  "jitter", "response_icmp", "differents_port", "value"]
    df = pd.read_csv('dataset_final.csv', delimiter=',', header=None)            ## train
    df_test = pd.read_csv('dataset_test_final.csv', delimiter=',', header=None)  ## test

    ## Add the label column, defaulting to 0 (benign)
    df[8] = 0
    df_test[8] = 0

    df.columns = head_names
    df_test.columns = head_names

    ## Mark traffic coming from the attacker IP as malicious (1)
    for index, row in df.iterrows():
        if row['src_ip'] == '192.168.20.3':
            df.at[index, 'value'] = 1

    for index, row in df_test.iterrows():
        if row['src_ip'] == '192.168.20.3':
            df_test.at[index, 'value'] = 1

    ## Drop the identifier and unused columns, then split features and labels
    df.pop('src_ip')
    df.pop('response_icmp')
    y_train = df.pop('value')
    x_train = df

    df_test.pop('src_ip')
    df_test.pop('response_icmp')
    y_test = df_test.pop('value')
    x_test = df_test
    return x_train, y_train, x_test, y_test


def print_results(y_test, y_pred):
    accuracy = accuracy_score(y_test, y_pred)
    print(f"Model accuracy: {accuracy}")

    TN, FP, FN, TP = confusion_matrix(y_test, y_pred).ravel()

    print("Confusion Matrix:")
    data_table = [[TP, FN],
                  [FP, TN]]
    table = tabulate(data_table, tablefmt="grid")
    print(table)
    print("Metrics:")
    TPR = TP / (TP + FN)
    FAR = FP / (FP + TN)
    Prec = TP / (TP + FP)

    print("TPR: " + str(TPR))
    print("FAR: " + str(FAR))
    print("Prec: " + str(Prec))


def dt(x_train, y_train, x_test, y_test):
    class_weights = {1: 0.5, 0: 3.5}
    dt = DecisionTreeClassifier(max_depth=8, min_samples_leaf=13, min_samples_split=6, class_weight=class_weights)
    dt.fit(x_train, y_train)
    y_pred = dt.predict(x_test)
    print_results(y_test, y_pred)
    save_model(dt, 'DT.pkl')


def mlp(x_train, y_train, x_test, y_test):
    mlp = MLPClassifier(hidden_layer_sizes=(128, 64), max_iter=100, activation='tanh', solver='adam', learning_rate_init=0.0003)
    mlp.fit(x_train, y_train)
    y_pred = mlp.predict(x_test)
    print_results(y_test, y_pred)
    save_model(mlp, 'MLP.pkl')


def svc(x_train, y_train, x_test, y_test):
    ## Fit the scaler on the training set only and reuse it for the test set
    scaler = StandardScaler().fit(x_train)
    x_train = scaler.transform(x_train)
    x_test = scaler.transform(x_test)
    svc = SVC(kernel='linear')
    svc.fit(x_train, y_train)
    y_pred = svc.predict(x_test)
    print_results(y_test, y_pred)
    save_model(svc, 'SVC.pkl')


if __name__ == '__main__':
    modelo = input("Please, enter model (DT,MLP,SVC): ")
    if modelo == 'DT' or modelo == 'MLP' or modelo == 'SVC':
        x_train, y_train, x_test, y_test = data_load()
        if modelo == 'DT':
            dt(x_train, y_train, x_test, y_test)
        elif modelo == 'MLP':
            mlp(x_train, y_train, x_test, y_test)
        else:
            svc(x_train, y_train, x_test, y_test)
    else:
        print("Invalid model")
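For reference, a minimal sketch of how one of the exported models could be reloaded for inference. It assumes create_models.py has already been run so that DT.pkl exists in the working directory; the sample row is purely hypothetical, and the column order matches the features left after data_load() drops src_ip, response_icmp and value.

## Illustrative sketch, not part of this commit
import joblib
import pandas as pd

model = joblib.load('DT.pkl')
sample = pd.DataFrame(
    [[120, 0, 0.0, 4.2, 0.8, 1]],  ## hypothetical feature values
    columns=["received_bytes", "send_bytes", "port_duration",
             "total_duration", "jitter", "differents_port"],
)
print(model.predict(sample))  ## 1 = flagged as attacker traffic, 0 = benign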
130602 dataset_final.csv Normal file
File diff suppressed because it is too large
54109 dataset_test_final.csv Normal file
File diff suppressed because it is too large
196 live.py Normal file
@@ -0,0 +1,196 @@
from scapy.all import sniff, IP, UDP, ICMP, DNS
import time
import cachetools
import multiprocessing
import sys
import re
import tornado.httpclient


port_server = 8888
ip_addres_server = '127.0.0.1'


def check_data(line, ip_ori, blackList, ttlCache_blacklist):
    http_client = tornado.httpclient.HTTPClient()

    line = line + "||" + ip_ori
    try:
        if ip_ori not in blackList:
            response = http_client.fetch(
                f"http://{ip_addres_server}:{port_server}/check",
                method="POST",
                body=line,
                headers={"Content-Type": "text/plain"},
            )
            ## The endpoint is expected to return the predicted class as text
            predict = response.body.decode("utf-8")
            if round(float(predict)) == 1:
                unix_time = time.time()
                blackList[ip_ori] = unix_time + ttlCache_blacklist

    except Exception:
        pass
    finally:
        http_client.close()


def waitresponsefunction(wait4response, lock, blackList, ttlCache_blacklist):
    while True:
        lock.acquire()
        to_remove = []
        if wait4response:
            for key in wait4response.keys():
                unix_time = time.time()
                value = wait4response[key]
                if float(value['expire']) < unix_time:
                    ## No ICMP response arrived in time: classify the flow as it is
                    ip_ori = key.split("||")[0]
                    check_data(value['line'], ip_ori, blackList, ttlCache_blacklist)
                    to_remove.append(key)
                    break

            for key in to_remove:
                wait4response.pop(key)

            to_remove = []

        if blackList:
            for key in blackList.keys():
                unix_time = time.time()
                value = blackList[key]
                if float(value) < unix_time:
                    ## Blacklist entry has expired
                    to_remove.append(key)
                    break

            for key in to_remove:
                blackList.pop(key)
        lock.release()


def process_packet(stateTable, wait4response, lock, maxWaitResponse, ip_addres_dst, blackList, ttlCache_blacklist, packet):

    ## Ignore DNS traffic
    if IP in packet and DNS not in packet:
        ip_pkt = packet[IP]
        ip_id = ip_pkt.id
        packet_size = len(packet)
        current_time = packet.time

        ## UDP packets carrying a payload and addressed to the monitored host
        if UDP in packet and len(packet.layers()) >= 4 and str(ip_pkt.dst) == ip_addres_dst:
            udp_pkt = packet[UDP]
            src_port = udp_pkt.sport
            dest_port = udp_pkt.dport
            name = str(ip_pkt.src) + '||' + str(src_port) + '||' + str(ip_pkt.dst) + '||' + 'UDP'
            name2waiting = str(ip_pkt.src) + '||' + str(src_port) + '||' + str(ip_pkt.dst) + '||' + str(dest_port) + '||' + 'UDP' + '||' + str(ip_id)
            if name in stateTable:
                ## Known source: refresh the TTL of the entry by reinserting it
                valor = stateTable.pop(name)
                stateTable[name] = valor

                unix_time = time.time()
                ## State of the last UDP packet from that IP
                last_time_pkg = stateTable[name]['last']
                last_port = stateTable[name]['last_port']
                first_connection = stateTable[name]['first_connection']
                first_connection_total = stateTable[name]['first_connection_total']
                ## Compute the features of the current packet
                multiple_ports = 1 if last_port != dest_port else 0
                duration = str(current_time - first_connection) if last_port != dest_port else str(0)

                ## Update the connection state
                first_connection_aux = current_time if last_port != dest_port else first_connection
                stateTable[name]['first_connection'] = first_connection_aux
                stateTable[name]['last'] = current_time
                stateTable[name]['last_port'] = dest_port
                ## Add to the waiting table
                line_to_csv = str(packet_size) + ',0,' + duration + ',' + str(current_time - first_connection_total) + ',' + str(current_time - last_time_pkg) + ',' + str(multiple_ports)
                wait4response[name2waiting] = {'line': line_to_csv, 'expire': unix_time + maxWaitResponse}

            else:
                ## First packet seen from this source
                unix_time = time.time()
                stateTable[name] = {'last': current_time, 'last_port': dest_port, 'first_connection': current_time, 'first_connection_total': current_time}

                line_to_csv = str(packet_size) + ',0,0,0,0,0'
                wait4response[name2waiting] = {'line': line_to_csv, 'expire': unix_time + maxWaitResponse}

        ## ICMP destination unreachable / port unreachable responses
        if ICMP in packet:
            icmp_pkt = packet[ICMP]
            icmp_type = icmp_pkt.type
            icmp_code = icmp_pkt.code
            if icmp_code == 3 and icmp_type == 3:
                if 'IP in ICMP' in icmp_pkt:
                    src_ip = icmp_pkt['IP in ICMP'].src
                    dest_ip = icmp_pkt['IP in ICMP'].dst
                    inside_id_ip = icmp_pkt['IP in ICMP'].id
                    if 'UDP in ICMP' in icmp_pkt and str(dest_ip) == ip_addres_dst:
                        src_port = icmp_pkt['UDP in ICMP'].sport
                        dest_port = icmp_pkt['UDP in ICMP'].dport
                        name2waiting = str(src_ip) + '||' + str(src_port) + '||' + str(dest_ip) + '||' + str(dest_port) + '||' + 'UDP' + '||' + str(inside_id_ip)
                        lock.acquire()
                        if name2waiting in wait4response:
                            auxLine = wait4response[name2waiting]['line']
                            auxSplit = auxLine.split(",")
                            auxSplit[1] = str(packet_size)
                            auxFinal = ','.join(auxSplit)
                            check_data(auxFinal, src_ip, blackList, ttlCache_blacklist)
                            wait4response.pop(name2waiting)
                        lock.release()


p_ipv4 = r'^(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$'


def is_double(string):
    try:
        float(string)
        return True
    except ValueError:
        return False


def main():
    arguments = sys.argv
    maxSizeCache = 0
    ttlCache = 0
    maxWaitResponse = 0
    ip_addres_dst = ''
    ttlCache_blacklist = 0

    ## Validate each positional argument before converting it
    if len(arguments) == 6 and arguments[1].isdigit() and arguments[2].isdigit() and arguments[3].isdigit() and is_double(arguments[4]):
        maxSizeCache = int(arguments[1])
        ttlCache = int(arguments[2])
        ttlCache_blacklist = int(arguments[3])
        maxWaitResponse = float(arguments[4])
        if re.match(p_ipv4, arguments[5]):
            ip_addres_dst = arguments[5]
        else:
            print("Invalid IP")
            sys.exit()
    else:
        print('Invalid arguments')
        sys.exit()

    stateTable = cachetools.TTLCache(maxsize=maxSizeCache, ttl=ttlCache)
    admin = multiprocessing.Manager()
    lock = multiprocessing.Lock()
    wait4response = admin.dict({})
    blackList = admin.dict({})

    process = multiprocessing.Process(target=waitresponsefunction, args=(wait4response, lock, blackList, ttlCache_blacklist))
    process.start()

    sniff(filter="udp or icmp", prn=lambda packet: process_packet(stateTable, wait4response, lock, maxWaitResponse, ip_addres_dst, blackList, ttlCache_blacklist, packet))


if __name__ == '__main__':
    main()
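The /check endpoint that check_data() posts to is not included in this commit. As a rough sketch of a compatible Tornado handler, assuming it loads one of the exported models (the model path is an assumption) and that the request body has the form "<features>||<src_ip>" built by check_data(), it might look like this:

## Illustrative sketch, not part of this commit
import joblib
import tornado.ioloop
import tornado.web

model = joblib.load('models/DT.pkl')  ## assumed model path

class CheckHandler(tornado.web.RequestHandler):
    def post(self):
        ## body format: "<comma-separated features>||<src_ip>", as built by check_data()
        line, _src_ip = self.request.body.decode("utf-8").rsplit("||", 1)
        features = [[float(v) for v in line.split(",")]]
        ## reply with the predicted class as plain text ("0" or "1")
        self.write(str(int(model.predict(features)[0])))

def make_app():
    return tornado.web.Application([(r"/check", CheckHandler)])

if __name__ == "__main__":
    make_app().listen(8888)  ## must match port_server in live.py
    tornado.ioloop.IOLoop.current().start()

live.py itself is then started with five positional arguments (cache size, cache TTL, blacklist TTL, maximum wait for an ICMP response, and the monitored destination IP), as validated in main().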
BIN models/DT.pkl Normal file
Binary file not shown.
BIN models/MLP.pkl Normal file
Binary file not shown.
BIN models/SVC.pkl Normal file
Binary file not shown.
169 sniffer.py Executable file
@@ -0,0 +1,169 @@
### Scan everything
### nmap -sU -p- -Pn 192.168.10.2
### A single port or a range of ports
### nmap -sU -p 80 -T4 -Pn 192.168.10.2
### With a send delay
### nmap -sU -p 80-85 -T4 -Pn --scan-delay 3 192.168.10.2
### Most common ports
### nmap -sU -p 80-85 -Pn -F 192.168.10.2
### Payload size
### nmap -sU -p 80-85 -Pn --data-length 64 192.168.10.2


from scapy.all import sniff, IP, UDP, ICMP
import time
import cachetools
import multiprocessing
import sys
import re


def waitresponsefunction(wait4response, lock):
    while True:
        lock.acquire()
        to_remove = []
        if wait4response:
            for key in wait4response.keys():
                unix_time = time.time()
                value = wait4response[key]
                if float(value['expire']) < unix_time:
                    print(len(wait4response))
                    ## No ICMP response arrived in time: write the flow as it is
                    ip_ori = key.split("||")[0]
                    file_name = "dataset_test.txt"
                    with open(file_name, "a") as file:
                        file.write(ip_ori + ',' + value['line'] + "\n")
                    to_remove.append(key)
                    break
            for key in to_remove:
                wait4response.pop(key)
        lock.release()


def process_packet(stateTable, wait4response, lock, maxWaitResponse, ip_addres_dst, packet):

    if IP in packet:
        ip_pkt = packet[IP]
        ip_id = ip_pkt.id
        packet_size = len(packet)
        current_time = packet.time

        ## UDP packets carrying a payload and addressed to the monitored host
        if UDP in packet and len(packet.layers()) >= 4 and str(ip_pkt.dst) == ip_addres_dst:
            udp_pkt = packet[UDP]
            src_port = udp_pkt.sport
            dest_port = udp_pkt.dport
            name = str(ip_pkt.src) + '||' + str(src_port) + '||' + str(ip_pkt.dst) + '||' + 'UDP'
            name2waiting = str(ip_pkt.src) + '||' + str(src_port) + '||' + str(ip_pkt.dst) + '||' + str(dest_port) + '||' + 'UDP' + '||' + str(ip_id)

            if name in stateTable:
                ## Known source: refresh the TTL of the entry by reinserting it
                valor = stateTable.pop(name)
                stateTable[name] = valor

                unix_time = time.time()
                ## State of the last UDP packet from that IP
                last_time_pkg = stateTable[name]['last']
                last_port = stateTable[name]['last_port']
                first_connection = stateTable[name]['first_connection']
                first_connection_total = stateTable[name]['first_connection_total']
                ## Compute the features of the current packet
                multiple_ports = 1 if last_port != dest_port else 0
                duration = str(current_time - first_connection) if last_port != dest_port else str(0)

                ## Update the connection state
                first_connection_aux = current_time if last_port != dest_port else first_connection
                stateTable[name]['first_connection'] = first_connection_aux
                stateTable[name]['last'] = current_time
                stateTable[name]['last_port'] = dest_port
                ## Add to the waiting table
                line_to_csv = str(packet_size) + ',0,' + duration + ',' + str(current_time - first_connection_total) + ',' + str(current_time - last_time_pkg) + ',0,' + str(multiple_ports)
                wait4response[name2waiting] = {'line': line_to_csv, 'expire': unix_time + maxWaitResponse}

            else:
                ## First packet seen from this source
                unix_time = time.time()
                stateTable[name] = {'last': current_time, 'last_port': dest_port, 'first_connection': current_time, 'first_connection_total': current_time}

                line_to_csv = str(packet_size) + ',0,0,0,0,0,0'
                wait4response[name2waiting] = {'line': line_to_csv, 'expire': unix_time + maxWaitResponse}

        ## ICMP destination unreachable / port unreachable responses
        if ICMP in packet:
            icmp_pkt = packet[ICMP]
            icmp_type = icmp_pkt.type
            icmp_code = icmp_pkt.code
            if icmp_code == 3 and icmp_type == 3:
                if 'IP in ICMP' in icmp_pkt:
                    src_ip = icmp_pkt['IP in ICMP'].src
                    dest_ip = icmp_pkt['IP in ICMP'].dst
                    inside_id_ip = icmp_pkt['IP in ICMP'].id
                    if 'UDP in ICMP' in icmp_pkt and str(dest_ip) == ip_addres_dst:
                        src_port = icmp_pkt['UDP in ICMP'].sport
                        dest_port = icmp_pkt['UDP in ICMP'].dport
                        name2waiting = str(src_ip) + '||' + str(src_port) + '||' + str(dest_ip) + '||' + str(dest_port) + '||' + 'UDP' + '||' + str(inside_id_ip)
                        lock.acquire()
                        if name2waiting in wait4response:
                            auxLine = wait4response[name2waiting]['line']
                            auxSplit = auxLine.split(",")
                            auxSplit[1] = str(packet_size)
                            auxSplit[5] = str(1)
                            auxFinal = ','.join(auxSplit)
                            file_name = "dataset_test.txt"
                            with open(file_name, "a") as file:
                                file.write(str(src_ip) + ',' + auxFinal + "\n")
                            wait4response.pop(name2waiting)
                        lock.release()


p_ipv4 = r'^(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$'


def is_double(string):
    try:
        float(string)
        return True
    except ValueError:
        return False


if __name__ == '__main__':

    arguments = sys.argv
    maxSizeCache = 0
    ttlCache = 0
    maxWaitResponse = 0
    ip_addres_dst = ''

    if len(arguments) == 5 and arguments[1].isdigit() and arguments[2].isdigit() and is_double(arguments[3]):
        maxSizeCache = int(arguments[1])
        ttlCache = int(arguments[2])
        maxWaitResponse = float(arguments[3])
        if re.match(p_ipv4, arguments[4]):
            ip_addres_dst = arguments[4]
        else:
            print("Invalid IP")
            sys.exit()
    else:
        print('Invalid arguments')
        sys.exit()

    stateTable = cachetools.TTLCache(maxsize=maxSizeCache, ttl=ttlCache)
    admin = multiprocessing.Manager()
    lock = multiprocessing.Lock()
    wait4response = admin.dict({})

    process = multiprocessing.Process(target=waitresponsefunction, args=(wait4response, lock))
    process.start()

    sniff(filter="udp or icmp", prn=lambda packet: process_packet(stateTable, wait4response, lock, maxWaitResponse, ip_addres_dst, packet))
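The lines sniffer.py appends to dataset_test.txt follow the same column order as the headerless CSVs consumed by create_models.py: src_ip followed by the seven features, with the label added later by data_load(). A quick, illustrative way to inspect a capture (file name taken from the script above):

## Illustrative sketch, not part of this commit
import pandas as pd

cols = ["src_ip", "received_bytes", "send_bytes", "port_duration",
        "total_duration", "jitter", "response_icmp", "differents_port"]
capture = pd.read_csv("dataset_test.txt", header=None, names=cols)
print(capture.head())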