
Commit

Fix file-overwrite issue with the -s parameter
Cl0udG0d committed Feb 5, 2021
1 parent 459e7e4 commit 6c870a5
Showing 3 changed files with 49 additions and 15 deletions.
2 changes: 1 addition & 1 deletion .gitignore
@@ -7,7 +7,7 @@ __pycache__/
 # C extensions
 *.so
 /venv
-
+/save
 debug.log
 # Distribution / packaging
 .Python
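The newly ignored /save directory is presumably where the per-target xray reports accumulate before base.mergeReport combines them; base.init's actual behavior is not shown in this diff, but a minimal sketch of ensuring the directory exists at startup might look like:

import os

def init(save_dir="save"):
    # Ensure the (git-ignored) report directory exists before any scan writes to it
    os.makedirs(save_dir, exist_ok=True)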
3 changes: 2 additions & 1 deletion scan.py
@@ -124,8 +124,9 @@ def foxScan(target):
             # Scan each collected target in turn
             req_pool = crawlergoMain.crawlergoGet(current_target)
             req_pool.add(current_target)
+            tempFilename = hashlib.md5(current_target.encode("utf-8")).hexdigest()
             # Use crawlergoGet to dynamically crawl the target's page URLs into the req_pool set
-            threadPoolScan(req_pool, filename, target)
+            threadPoolScan(req_pool, tempFilename, target)
     print("InPuT T4rGet {} Sc3n EnD#".format(target))
     return

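The core of the fix is visible above: each crawled target now gets its own report filename, derived from the MD5 of the target URL, instead of every target reusing the single name passed via -s. A minimal sketch of the naming scheme (report_name_for is an illustrative helper, not from the repository):

import hashlib

def report_name_for(target_url):
    # One stable filename per target URL, so scans of different targets
    # no longer overwrite the report named by the -s flag.
    return hashlib.md5(target_url.encode("utf-8")).hexdigest()

# Distinct targets yield distinct 32-character hex names:
print(report_name_for("http://testphp.vulnweb.com/"))
print(report_name_for("http://127.0.0.1/"))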
59 changes: 46 additions & 13 deletions test.py
@@ -1,23 +1,56 @@
import re
import shutil

import requests
from subDomainsBrute import subDomainsBruteMain
from Sublist3r import Sublist3rMain
from Subfinder import subfinderMain
from OneForAll import oneforallMain
from CScan import CScan
from JSmessage.jsfinder import JSFinder
import hashlib
from crawlergo import crawlergoMain
from Xray import pppXray
import config
import sys
import getopt
import base
from ServerJiang.jiangMain import SendNotice
import os
from concurrent.futures import ThreadPoolExecutor, wait, ALL_COMPLETED


def threadPoolScan(req_pool, filename, target):
    print("req_pool num is {}".format(len(req_pool)))
    thread = ThreadPoolExecutor(max_workers=config.ThreadNum)
    i = 0
    all_task = []
    while len(req_pool) != 0:
        # Pop the URLs in req_pool one by one and scan each
        temp_url = req_pool.pop()
        current_filename = hashlib.md5(temp_url.encode("utf-8")).hexdigest()
        # Invoke xray to scan and save the report
        # pppXray.xrayScan(temp_url, current_filename)
        i += 1
        one_t = thread.submit(pppXray.xrayScan, temp_url, current_filename)
        all_task.append(one_t)
        if i == 5 or len(req_pool) == 0:
            i = 0
            wait(all_task, return_when=ALL_COMPLETED)
            all_task = []
    base.mergeReport(filename)
    SendNotice("{} 花溪九尾 scan finished".format(target))

def foxScan():
    # Collect subdomains
    config.target_queue.put('127.0.0.1')
    config.target_queue.put('http://testphp.vulnweb.com/')

    while not config.target_queue.empty():
        current_target = config.target_queue.get()
        if base.checkBlackList(current_target):
            # Scan each collected target in turn
            req_pool = crawlergoMain.crawlergoGet(current_target)
            req_pool.add(current_target)
            tempFilename = hashlib.md5(current_target.encode("utf-8")).hexdigest()
            # Use crawlergoGet to dynamically crawl the target's page URLs into the req_pool set
            threadPoolScan(req_pool, tempFilename, "aa")
    print("InPuT T4rGet {} Sc3n EnD#".format("aa"))
    return

def main():
    subDomainsBruteMain.subDomainsBruteScan('wkj.work', "aa")
    base.init()
    foxScan()
    # subDomainsBruteMain.subDomainsBruteScan('wkj.work',"aa")
    return

if __name__ == '__main__':
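threadPoolScan above submits xray scans to a ThreadPoolExecutor but blocks after every batch of five, waiting for all futures before submitting more, which caps how many xray processes run at once. A standalone sketch of that batch-and-wait pattern (worker stands in for pppXray.xrayScan; the batch size of 5 mirrors the diff, the rest is illustrative):

from concurrent.futures import ThreadPoolExecutor, wait, ALL_COMPLETED

def worker(url):
    # Stand-in for pppXray.xrayScan(url, filename)
    print("scanning", url)

def batched_scan(urls, batch_size=5, max_workers=5):
    pool = ThreadPoolExecutor(max_workers=max_workers)
    pending, i = [], 0
    while urls:
        pending.append(pool.submit(worker, urls.pop()))
        i += 1
        if i == batch_size or not urls:
            # Block until the whole batch finishes before queuing more work
            wait(pending, return_when=ALL_COMPLETED)
            pending, i = [], 0

batched_scan(["http://a.example/", "http://b.example/", "http://c.example/"])

Waiting on ALL_COMPLETED after each batch is simpler than a semaphore, though it lets the pool idle while the slowest scan in a batch finishes; the sketch mirrors the diff's trade-off rather than improving on it.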
