-
Notifications
You must be signed in to change notification settings - Fork 1
/
Copy pathmalware_traffic_download.py
66 lines (52 loc) · 2.87 KB
/
malware_traffic_download.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
import os
import re
import time
import urllib
import urllib.request
from threading import Thread
from zipfile import ZipFile as unzip

import requests
import urllib3
urllib3.disable_warnings()

# Browser-like User-Agent: the site may reject the default python-requests UA.
headers = {
    "User-Agent" : "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/104.0.0.0 Safari/537.36"
}

os.system('cls')  # clear console (Windows-only, like the rest of this script)

# Fetch the front page to discover which year archives exist.
source_content = requests.get("https://www.malware-traffic-analysis.net/index.html", headers=headers).text

# Collect the year labels from the header links ("... - [<a ...>2013</a>] ...").
# Fixes vs. the original: separators are only placed between labels that were
# actually kept (no stray ", , "), and the "Click here" entry is excluded by
# equality rather than a substring test.
year_link_re = re.compile(r".+index\.html.+list_header\"\>(.+?)\<\/a\>")
year_labels = []
for chunk in source_content.split(" - ["):
    label = "".join(year_link_re.findall(chunk))
    if label and label != "Click here":
        year_labels.append(label)
year_sort = ", ".join(year_labels)
print("[+] Select Years : " + year_sort)

# Ask which year's archive to download (prompt is intentionally in Korean).
year_input = input("\n몇 년도 PCAP을 다운로드 할까요?\n\n: ")
malware_url = requests.get("https://www.malware-traffic-analysis.net/" + str(year_input) + "/index.html", headers=headers).text

# Build "URL : .../<href>\nTitle : <title>\n\n" entries for every post link of
# the selected year, then pull the per-post index URLs back out of that text.
entry_re = re.compile(r".+--.+\<a href\=\"(.+?)\" class\=\"main_menu\"\>(.*?)\<\/a\>")
content_text = "".join(
    "URL : " + "https://www.malware-traffic-analysis.net/" + str(year_input) + "/%s\nTitle : %s\n\n" % x
    for x in entry_re.findall(malware_url)
)
content_url = re.findall(r"https?.+?\.html", content_text)

# Create the output directory. os.makedirs(..., exist_ok=True) replaces the
# original os.system('mkdir ...'), which printed an error on every re-run once
# the directory existed. Trailing backslash kept: downstream code does plain
# string concatenation (path_follow + filename).
path_follow = os.path.join(os.environ['userprofile'], "desktop", "Pcap Analysis") + "\\"
os.makedirs(path_follow, exist_ok=True)
print("[+] 전체 개수는 " + str(len(content_url)) + "개 입니다.")
def PCAPDown():
    """Download every PCAP zip linked from the selected year's post list.

    Iterates the module-level ``content_url`` list, fetches each post's index
    page, and saves its first ``*pcap*zip`` attachment into ``path_follow``.
    Posts without a pcap attachment are counted and reported at the end.

    Fixes vs. the original: the attachment regex is compiled once and matched
    once per page (was three identical ``re.findall`` calls), regexes are raw
    strings, and the per-post request now sends the shared browser ``headers``
    like every other request in this script.
    """
    # Hoisted: same pattern was re-evaluated three times per page before.
    pcap_href_re = re.compile(r"href\=\"(.+?pcap.+zip)\"")
    missing = 0
    for entry in content_url:
        try:
            # Full post URL, and its directory part for resolving the
            # relative attachment href.
            full_url = "".join(re.findall(r"https?.+?\/index.*?\.html", entry))
            base_url = "".join(re.findall(r"(https?.+?\/)index.*?\.html", entry))
            page = requests.get(full_url, headers=headers).text
            # IndexError here (no pcap link on the page) is handled below.
            href = pcap_href_re.findall(page)[0]
            # NOTE(review): if href ever contains a subdirectory, this flat
            # concatenation would need the directory created first — the
            # original had the same limitation.
            urllib.request.urlretrieve(base_url + href, path_follow + href)
            print("[+] " + href + " 파일이 다운로드 완료 되었습니다!")
        except IndexError:
            missing += 1
            print("\n[-] " + full_url + " URL에는 PCAP이 존재하지 않습니다\n")
    print("[-] pcap이 없는 사이트는 모두 " + str(missing) + " 곳 입니다.")
## Unzip Password 'infected'
def unZip_Start(base_dir=None):
    """Extract every downloaded zip under *base_dir* (default: ``path_follow``).

    Archives from malware-traffic-analysis.net use the well-known password
    ``infected``.  Handles both layouts: zip files sitting directly in the
    base directory (which is what ``PCAPDown`` actually produces) and zips
    inside one level of subdirectories (what the original code assumed —
    it crashed with NotADirectoryError on a flat layout).

    Fixes vs. the original: the ZipFile handle is closed via ``with`` instead
    of being leaked, and plain files in the base directory are extracted
    rather than passed to os.listdir().
    """
    root = path_follow if base_dir is None else base_dir
    for name in os.listdir(root):
        entry = os.path.join(root, name)
        if os.path.isdir(entry):
            # One level of subdirectory: unzip each file into that subdir.
            targets = [os.path.join(entry, f) for f in os.listdir(entry)]
            dest = entry
        else:
            # Flat layout: the zip sits directly in the base directory.
            targets = [entry]
            dest = root
        for target in targets:
            # pwd is ignored by zipfile for unencrypted members, so this is
            # safe for both protected and plain archives.
            with unzip(target) as archive:
                archive.extractall(dest, pwd=b"infected")
            print("[+] " + target + " 파일 압축 해제 완료\n")
if __name__ == "__main__":
    # Download everything first, then unpack the archives on a worker thread.
    PCAPDown()
    unzip_worker = Thread(target=unZip_Start)
    unzip_worker.start()