This repository has been archived by the owner on Dec 16, 2022. It is now read-only.

Build 248 - Fix issue #123. Nmap scans now use run_cmd like all other commands.

This allows output to be saved to files. After the command completes, the output file is parsed with NmapParser.parse_fromfile(file).
Cleaned up some code that had been commented out for a long time.
sethsec committed Apr 28, 2020
1 parent 9481708 commit 2a82a9a
Showing 6 changed files with 13 additions and 188 deletions.
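
For context, the unified flow this commit moves to looks roughly like the following. This is a minimal sketch, not the project's exact code: the helper name run_nmap_to_file and the paths are illustrative, while NmapParser.parse_fromfile is the real python-libnmap call named in the commit message.

import subprocess
from libnmap.parser import NmapParser

def run_nmap_to_file(target, output_base):
    # -oX writes XML that parse_fromfile can read back once the process
    # exits; -oN keeps a human-readable copy on disk alongside it.
    cmd = ["nmap", "-sV", "-oX", output_base + ".xml",
           "-oN", output_base + ".txt", target]
    subprocess.run(cmd, check=True)
    return NmapParser.parse_fromfile(output_base + ".xml")

report = run_nmap_to_file("10.0.0.1", "/tmp/10.0.0.1_nmap_tcp_scan")
print(report.summary)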
2 changes: 1 addition & 1 deletion celerystalk
@@ -90,7 +90,7 @@ import csv

from lib.nmap import nmapcommand

build=str(247)
build=str(248)


def print_banner():
1 change: 1 addition & 0 deletions lib/csimport.py
@@ -418,6 +418,7 @@ def process_qualys_data(qualys_port_services,workspace,target=None):


def process_nmap_data(nmap_report,workspace, target=None):
print("in proc nmap data")
workspace_mode = lib.db.get_workspace_mode(workspace)[0][0]
services_file = open('/etc/services', mode='r')
services_file_data = services_file.readlines()
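
The nmap_report handed to process_nmap_data is a python-libnmap NmapReport. Its internals aren't shown in this hunk, but a consumer of such a report walks hosts and services roughly like this (a generic sketch of the library API, not celerystalk's actual loop):

from libnmap.parser import NmapParser

report = NmapParser.parse_fromfile("/tmp/host_nmap_tcp_scan.xml")
for host in report.hosts:
    for svc in host.services:
        # e.g. 10.0.0.1 80/tcp http nginx/1.14.0
        print(host.address,
              "{0}/{1}".format(svc.port, svc.protocol),
              svc.service, svc.banner)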
4 changes: 2 additions & 2 deletions lib/nmap.py
@@ -23,7 +23,7 @@ def nmap_scan_subdomain_host(vhost,workspace,simulation,output_base_dir,config_f
except:
os.makedirs(output_host_dir)

output_file = os.path.normpath(os.path.join(output_host_dir, vhost + "_nmap_tcp_scan.txt"))
output_file = os.path.normpath(os.path.join(output_host_dir, vhost + "_nmap_tcp_scan"))
if not vhost_explicitly_out_of_scope:
#print(config_nmap_options)
cmd_name = "nmap_tcp_scan"
@@ -39,7 +39,7 @@ def nmap_scan_subdomain_host(vhost,workspace,simulation,output_base_dir,config_f
task_id = uuid()
utils.create_task(cmd_name, populated_command, vhost, output_file, workspace, task_id)
result = chain(
tasks.cel_nmap_scan.si(cmd_name, populated_command, vhost, config_nmap_options, celery_path, task_id,workspace).set(task_id=task_id),
tasks.run_cmd.si(cmd_name, populated_command, celery_path, task_id,output_file=output_file,process_nmap=True).set(task_id=task_id),
)()

def nmapcommand(simulation,targets,config_file=None):
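
The swap above replaces the nmap-specific cel_nmap_scan task with the generic run_cmd task inside the same Celery chain. The pattern in play — an immutable signature pinned to a pre-generated task id — looks like this in isolation (a sketch with a stand-in task body; the broker URL and argument values are illustrative):

from celery import Celery, chain, uuid

app = Celery("demo", broker="redis://localhost:6379/0")

@app.task
def run_cmd(command_name, populated_command, celery_path, task_id,
            output_file=None, process_nmap=None):
    ...  # run the command, save output_file, parse the XML if process_nmap

task_id = uuid()
# .si() builds an immutable signature, so run_cmd ignores any result from a
# preceding task in the chain. .set(task_id=...) pins Celery's task id to the
# id already written to the tasks table, keeping the DB row and worker in sync.
result = chain(
    run_cmd.si("nmap_tcp_scan", "nmap -sV ...", "/opt/celerystalk", task_id,
               output_file="/tmp/vhost_nmap_tcp_scan", process_nmap=True)
        .set(task_id=task_id),
)()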
129 changes: 0 additions & 129 deletions lib/scan.py
@@ -426,134 +426,6 @@ def process_url(url, workspace, output_dir, arguments,config_file=None):
print("[!] {0} is explicitly marked as out of scope. Skipping...".format(vhost))


def process_db_services(output_base_dir, simulation, workspace, target=None,host=None,config_file=None):
celery_path = sys.path[0]
config, supported_services = config_parser.read_config_ini(config_file)
task_id_list = []
total_tasks_num = 0
if host:
target = db.get_vhost_ip(host)
try:
os.stat(output_base_dir)
except:
print("[+] Output directory does not exist. Creating " + output_base_dir)
os.makedirs(output_base_dir)
#unique_hosts = db.get_unique_hosts(workspace)
unique_unscanned_vhosts = db.get_inscope_unsubmitted_vhosts(workspace)
for row in unique_unscanned_vhosts:

vhost = row[0]
#print("in proccess_db_services - vhost:" + vhost)
vhost_ip = db.get_vhost_ip(vhost,workspace)[0]
#print(target)
#print(vhost_ip)
#print(str(vhost_ip))

if (IPAddress(vhost_ip[0]) == target) or (target is None):
host_dir = output_base_dir + vhost
host_data_dir = host_dir + "/celerystalkOutput/"
# Creates something like /pentest/10.0.0.1, /pentest/10.0.0.2, etc.
utils.create_dir_structure(vhost, host_dir)
#Next two lines create the file that will contain each command that was executed. This is not the audit log,
#but a log of commands that can easily be copy/pasted if you need to run them again.
summary_file_name = host_data_dir + "ScanSummary.log"
summary_file = open(summary_file_name, 'a')

#THIS is just a work around until i have a real solution. Really, UDP scans should be done
#For every host in the scanned host list, launch a quick UDP scan (top 100 ports)
scan_output_base_host_filename = host_data_dir + vhost

for (cmd_name, cmd) in config.items("nmap-commands"):
if cmd_name == "udp_scan":
#print(cmd_name,cmd)
outfile = scan_output_base_host_filename + "_" + cmd_name
populated_command = cmd.replace("[TARGET]", vhost).replace("[OUTPUT]", outfile)
populated_command = replace_user_config_options(config_file, populated_command)

#print(cmd)

#cmd_name = "udp-top100"
#populated_command = 'nmap -sV -sC -Pn -sU --top-ports 100 -oN {0}_nmap_UDP_service_scan.txt -oX {0}_nmap_UDP_service_scan.xml {1}'.format(
# scan_output_base_host_filename, vhost)
if simulation:
populated_command = "#" + populated_command
#outfile = scan_output_base_host_filename + "_nmap_UDP_service_scan.txt"
task_id = uuid()
utils.create_task(cmd_name, populated_command, vhost, outfile + ".txt", workspace, task_id)
result = chain(
# insert a row into the database to mark the task as submitted. a subtask does not get tracked
# in celery the same way a task does, for instance, you can't find it in flower
#tasks.cel_create_task.subtask(args=(cmd_name, populated_command, vhost, outfile + ".txt", workspace, task_id)),

# run the command. run_task takes care of marking the task as started and then completed.
# The si tells run_cmd to ignore the data returned from a previous task
tasks.run_cmd.si(cmd_name, populated_command,celery_path,task_id).set(task_id=task_id),

)() # .apply_async()
task_id_list.append(result.task_id)


if not simulation:
db.update_vhosts_submitted(vhost, vhost, workspace, 1)



#print "IP Address: {0}".format(vhost)
db_services = db.get_all_services_for_ip(vhost_ip[0], workspace)

for db_service in db_services:
(ip, scanned_service_port, scanned_service_protocol, scanned_service_name,product,version,extra_info,workspace) = db_service

scan_output_base_file_name = host_data_dir + vhost + "_" + str(scanned_service_port) + "_" + scanned_service_protocol + "_"

#If the service name is not in the supported service list, give the user notice so they can add the service
# and add some commands to the service. This is a major GAP right now. If the service is not in the config,
# the script completely ignores it, which is not good!
if scanned_service_name not in supported_services:
print("[!] Nmap reports {0}:{1} is running: [{2}]. There are no commands to run against {2} in config.ini.".format(vhost, scanned_service_port, scanned_service_name))
summary_file.write("[!] Nmap reports {0}:{1} is running: [{2}]. There are no commands to run against {2} in config.ini\n".format(vhost, scanned_service_port, scanned_service_name))
#updated_port_scan = utils.nmap_follow_up_scan(vhost, scanned_service_port)
#scanned_service_name = updated_port_scan.hosts[0]._services[0].service
cmd_name = "nmap_service_scan"
populated_command = 'nmap -sV -sC -Pn -p {0} -oN {1}_nmap_service_scan.txt {2}'.format(
scanned_service_port, scan_output_base_file_name, vhost)
populated_command = replace_user_config_options(config_file, populated_command)

if simulation:
populated_command = "#" + populated_command

outfile = scan_output_base_file_name + "_nmap_service_scan.txt"

task_id = uuid()
utils.create_task(cmd_name, populated_command, vhost, outfile , workspace, task_id)
result = chain(
# insert a row into the database to mark the task as submitted. a subtask does not get tracked
# in celery the same way a task does, for instance, you can't find it in flower
#tasks.cel_create_task.subtask(args=(cmd_name, populated_command, vhost, outfile , workspace, task_id)),

# run the command. run_task takes care of marking the task as started and then completed.
# The si tells run_cmd to ignore the data returned from a previous task
tasks.run_cmd.si(cmd_name, populated_command,celery_path,task_id).set(task_id=task_id),

)() # .apply_async()

task_id_list.append(result.task_id)
else:
parse_config_and_send_commands_to_celery(scanned_service_name, scanned_service_port, scan_output_base_file_name, config, simulation, output_base_dir, host_dir, workspace, task_id_list,vhost,scanned_service_protocol)
#task_id_list = task_id_list + new_tasks_list
summary_file.close()

print("[+] Submitted {0} tasks to the queue.".format(len(task_id_list)))
total_tasks_num = total_tasks_num + len(task_id_list)
task_id_list = []
print("\n\n[+] Summary:\tSubmitted {0} tasks to the [{1}] workspace.".format(total_tasks_num,workspace))
print("[+]\t\tThere might be additional tasks added to the queue during post processing\n[+]")
print("[+]\t\tTo keep an eye on things, run one of these commands: \n[+]")
print("[+]\t\tcelerystalk query [watch]")
print("[+]\t\tcelerystalk query brief [watch]")
print("[+]\t\tcelerystalk query summary [watch]\n")



def parse_config_and_send_commands_to_celery(scanned_service_name, scanned_service_port, scan_output_base_file_name, config, simulation, output_base_dir, host_dir, workspace, task_id_list,ip,scanned_service_protocol):
"""
@@ -642,7 +514,6 @@ def create_dns_recon_tasks(domains,simulation,workspace,output_base_dir,out_of_s
# The si tells run_cmd to ignore the data returned from a previous task
tasks.run_cmd.si(cmd_name, populated_command,celery_path,task_id,process_domain_tuple=process_domain_tuple).set(task_id=task_id),
)() # .apply_async()
task_id_list.append(result.task_id)

total_tasks_num = total_tasks_num + len(task_id_list)
print("\n\n[+] Summary:\tSubmitted {0} tasks to the [{1}] workspace.".format(total_tasks_num, workspace))
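
The bulk of the deletion above is process_db_services' nmap handling, which is now routed through run_cmd instead. One pattern worth noting from the removed code is how commands are built: templates from config.ini have [TARGET] and [OUTPUT] placeholders swapped in per host. A self-contained sketch (the template string mirrors the [nmap-commands] style in config.ini but is illustrative):

# Template-style command population, as the removed code did it.
cmd_template = ("nmap -sV -sC -Pn -sU --top-ports 100 "
                "-oN [OUTPUT].txt -oX [OUTPUT].xml [TARGET]")
vhost = "10.0.0.1"
outfile = "/pentest/10.0.0.1/celerystalkOutput/10.0.0.1_udp_scan"

populated_command = cmd_template.replace("[TARGET]", vhost).replace("[OUTPUT]", outfile)
print(populated_command)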
6 changes: 3 additions & 3 deletions setup/install.sh
@@ -50,10 +50,10 @@ if [ "$DISTRO" == "kali" ]; then
echo 'deb [arch=amd64] https://download.docker.com/linux/debian buster stable' > /etc/apt/sources.list.d/docker.list
apt-get update -y
apt-get remove docker docker-engine docker.io containerd runc -y
apt-get install apt-transport-https ca-certificates curl wget gnupg2 software-properties-common docker-ce gobuster nikto cewl whatweb sqlmap nmap sslscan sslyze hydra medusa dnsrecon enum4linux ncrack crowbar onesixtyone smbclient redis-server seclists chromium python-pip python3-pip wpscan jq amass -y
apt-get install apt-transport-https ca-certificates curl wget gnupg2 software-properties-common docker-ce vim curl gobuster nikto cewl whatweb sqlmap nmap sslscan sslyze hydra medusa dnsrecon enum4linux ncrack crowbar onesixtyone smbclient redis-server seclists chromium python-pip python3-pip wpscan jq amass -y
else
apt-get update -y
apt-get install apt-transport-https ca-certificates curl wget gnupg2 software-properties-common gobuster nikto cewl whatweb sqlmap nmap sslscan sslyze hydra medusa dnsrecon enum4linux ncrack crowbar onesixtyone smbclient redis-server seclists chromium python-pip python3-pip wpscan jq amass -y
apt-get install apt-transport-https ca-certificates curl wget gnupg2 software-properties-common vim curl gobuster nikto cewl whatweb sqlmap nmap sslscan sslyze hydra medusa dnsrecon enum4linux ncrack crowbar onesixtyone smbclient redis-server seclists chromium python-pip python3-pip wpscan jq amass -y

fi
elif [ "$DISTRO" == "ubuntu" ]; then
@@ -72,7 +72,7 @@ elif [ "$DISTRO" == "ubuntu" ]; then
apt-get install wget docker.io python-pip python3-pip unzip redis-server chromium-bsu jq -y
else
apt-get update -y
apt-get install wget curl python-pip python3-pip unzip redis-server chromium-bsu jq -y
apt-get install wget curl vim python-pip python3-pip unzip redis-server chromium-bsu jq -y

fi
fi
59 changes: 6 additions & 53 deletions tasks.py
@@ -20,7 +20,7 @@


@app.task
def run_cmd(command_name, populated_command,celery_path,task_id,path=None,process_domain_tuple=None):
def run_cmd(command_name, populated_command,celery_path,task_id,path=None,process_domain_tuple=None,process_nmap=None,output_file=None):
"""
:param command_name:
@@ -76,66 +76,19 @@ def run_cmd(command_name, populated_command,celery_path,task_id,path=None,proces
#putting this here because i want to parse scan tool output for urls, not subdomain tools output
parsers.generic_urlextract.extract_in_scope_urls_from_task_output(out)

if process_nmap:
nmap_xml = output_file + ".xml"
nmap_report = NmapParser.parse_fromfile(nmap_xml)
workspace = lib.db.get_current_workspace()[0][0]
lib.csimport.process_nmap_data(nmap_report, workspace)
return out

#post.post_process(populated_command, output_base_dir, workspace, ip)



@app.task()
def cel_create_task(*args,**kwargs):
command_name, populated_command, ip, output_dir, workspace, task_id = args
db_task = (task_id, 1, command_name, populated_command, ip, output_dir, 'SUBMITTED', workspace)
db.create_task(db_task)
#return populated_command


@app.task()
def cel_nmap_scan(cmd_name, populated_command, host, config_nmap_options, celery_path, task_id,workspace):
"""
:param cmd_name:
:param populated_command:
:param host:
:param config_nmap_options:
:param celery_path:
:param task_id:
:param workspace:
:return:
"""
# Without the sleep, some jobs were showing as submitted even though
# they were started. Not sure why.
#time.sleep(3)
path = os.path.abspath(os.path.join(os.path.dirname(os.path.abspath(lib.scan.__file__)),".."))
audit_log = path + "/log/cmdExecutionAudit.log"
f = open(audit_log, 'a')
start_time = time.time()
start_time_int = int(start_time)
start_ctime = time.ctime(start_time)
start = timer()

print(populated_command)

print("[+] Kicking off nmap scan for " + host)
lib.db.update_task_status_started("STARTED", task_id, 0, start_time_int)
nm = NmapProcess(host, options=config_nmap_options)
rc = nm.run()
nmap_report = NmapParser.parse(nm.stdout)
end = timer()
end_ctime = time.ctime(end)
run_time = end - start
db.update_task_status_completed("COMPLETED", task_id, run_time)

#f.write("\n" + str(end_ctime) + "," + "CMD COMPLETED" + ","" + str(run_time) + " - " + populated_command + "\n")

f.write(str(start_ctime) + "," + str(end_ctime) + "," + str(run_time) + cmd_name + "\n")
f.close()
lib.csimport.process_nmap_data(nmap_report, workspace)
return nmap_report



@app.task()
def cel_process_db_services(output_base_dir, simulation, workspace):
lib.scan.process_db_services(output_base_dir, simulation, workspace)
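
Taken together, the tasks.py change trades libnmap's in-worker NmapProcess (the deleted cel_nmap_scan) for shelling the command out like every other tool and parsing the XML afterwards. A simplified sketch of that new tail of run_cmd — error handling omitted, and it assumes the populated command included -oX so the XML exists next to output_file:

import subprocess
from libnmap.parser import NmapParser

def run_cmd_sketch(populated_command, output_file, process_nmap=False):
    # Run the tool exactly as any other command, capturing stdout.
    proc = subprocess.run(populated_command, shell=True,
                          capture_output=True, text=True)
    if process_nmap:
        # The command wrote output_file + ".xml" via -oX; parse it from disk
        # so the raw scan output also survives for reporting.
        return NmapParser.parse_fromfile(output_file + ".xml")
    return proc.stdout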

