diff --git a/README.md b/README.md
index 0a019b54..f4ee8439 100644
--- a/README.md
+++ b/README.md
@@ -47,7 +47,7 @@
 2. Fill in the listening IP address for `jd_wstool`
 
-   e.g. listening address 1: http://192.168.0.101:5201, fill it in at the top of the script as ipaddr= '192.168.0.101'
+   e.g. listening address 1: http://192.168.0.101:5201 , fill it in at the top of the script as ipaddr= '192.168.0.101'
 
 3. Run the script
 
@@ -97,9 +97,13 @@
 3. Stage two: the image captcha (pick any one of the following types; local recognition is the default)
 
-   1. Local recognition (no more paying for captcha solving 👍), from the [JDCaptcha](https://github.com/AntonVanke/JDCaptcha) project provided by [@AntonVanke](https://github.com/AntonVanke)
+   1. Local recognition (no more paying for captcha solving 👍)
+
+      * From the [JDCaptcha](https://github.com/AntonVanke/JDCaptcha) project provided by [@AntonVanke](https://github.com/AntonVanke) (already integrated),
+        [the test image is the last screenshot](https://github.com/yqchilde/JDMemberCloseAccount#screenshots)
+
+      * Weights trained by [@dd178](https://github.com/dd178) with [yolov4](https://github.com/AlexeyAB/darknet)
 
    2. Paid captcha-solving platforms
 
@@ -131,12 +135,19 @@
 pip3 install -r requirements.txt -i https://mirrors.aliyun.com/pypi/simple/
 ```
 
-3. Download the matching browser driver and put it in the project's `drivers` folder
+3. Download the required files
 
-   * For `chrome`, open `chrome://version/` to check the browser version, then download the matching version/OS driver from [chromedriver](http://chromedriver.storage.googleapis.com/index.html)
+   3.1 Download the matching browser driver and put it in the project's `drivers` folder
+
+   For `chrome`, open `chrome://version/` to check the browser version, then download the matching version/OS driver from [chromedriver](http://chromedriver.storage.googleapis.com/index.html)
    (only the first three segments of the version number have to match, e.g. for `91.0.4472.77` any `91.0.4472.x` will do). Unpack it and put the executable (`chromedriver` on mac, `chromedriver.exe` on win) into the project's `drivers` directory.
+
+   3.2 If you want to use yolov4 to recognize the captcha, download the
+   [weights file](https://github.com/dd178/JDMemberCloseAccount/releases/download/v1.0.3/yolov4-custom.tar.gz) and extract `yolov4-custom.weights` into the `yolov4` folder (see the sketch after this diff section)
+
 ### 2. Complete the configuration file
 
 * the `config.yaml` file
 
@@ -193,7 +204,7 @@ sms_captcha:
   aliyun_appcode: ""
 
 # image_captcha: image-captcha settings
-# image_captcha.type: image captcha recognizer type, one of: local, cjy, tj
+# image_captcha.type: image captcha recognizer type, one of: local, cjy, tj, yolov4
 # image_captcha.cjy_username: ChaoJiYing account, only needed when image_captcha.type is cjy
 # image_captcha.cjy_password: ChaoJiYing password, only needed when image_captcha.type is cjy
 # image_captcha.cjy_soft_id: ChaoJiYing software ID, only needed when image_captcha.type is cjy
@@ -201,6 +212,9 @@ sms_captcha:
 # image_captcha.tj_username: TuJian account, only needed when image_captcha.type is tj
 # image_captcha.tj_password: TuJian password, only needed when image_captcha.type is tj
 # image_captcha.tj_type_id: TuJian captcha type ID, only needed when image_captcha.type is tj; this project uses 19
+# yolov4_weights: path to the yolov4 weights file, only needed when image_captcha.type is yolov4
+# yolov4_cfg: path to the yolov4 config file, only needed when image_captcha.type is yolov4
+# CUDA: try CUDA acceleration (reportedly several to tens of times faster; requires OpenCV built with CUDA support), only needed when image_captcha.type is yolov4
 image_captcha:
   type: "local"
   cjy_username: ""
@@ -210,6 +224,9 @@ image_captcha:
   tj_username: ""
   tj_password: ""
   tj_type_id: 19
+  yolov4_weights: "yolov4/yolov4-custom.weights"
+  yolov4_cfg: "yolov4/yolov4-custom.cfg"
+  CUDA: false
 
 # user-agent, can be customized
 user-agent:
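Step 3.2 above can also be scripted. A minimal sketch: the release URL is taken from the README text above, but the archive layout (whether the weights sit at the tarball root) is an assumption, so verify where `yolov4-custom.weights` lands after extraction.

```python
#!/usr/bin/env python3
# Minimal sketch of README step 3.2: fetch the yolov4 weights release and
# unpack it into the yolov4/ folder. URL is from the README; the tarball
# layout is an assumption -- the weights must end up at
# yolov4/yolov4-custom.weights to match the default config.yaml paths.
import tarfile
import urllib.request

URL = ("https://github.com/dd178/JDMemberCloseAccount/"
       "releases/download/v1.0.3/yolov4-custom.tar.gz")

urllib.request.urlretrieve(URL, "yolov4-custom.tar.gz")  # download the release archive
with tarfile.open("yolov4-custom.tar.gz", "r:gz") as tar:
    tar.extractall("yolov4")  # unpack next to yolov4-custom.cfg
```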
diff --git a/captcha/jd_yolo_captcha.py b/captcha/jd_yolo_captcha.py
new file mode 100644
index 00000000..6ab558bf
--- /dev/null
+++ b/captcha/jd_yolo_captcha.py
@@ -0,0 +1,72 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+# @Time : 2021-06-21 11:33
+# @Author : 178
+import cv2
+import base64
+import numpy as np
+import os
+import sys
+
+
+class JDyolocaptcha(object):
+    """
+    yolov4-based captcha recognizer
+    """
+    def __init__(self, _config):
+        self.CONFIDENCE_THRESHOLD = 0.8  # minimum confidence
+        self.NMS_THRESHOLD = 0.01  # suppress duplicate detections
+        from utils.logger import Log
+        self.logger = Log().logger
+        weights = _config['yolov4_weights']
+        cfg = _config['yolov4_cfg']
+        if os.path.exists(weights):
+            self.net = cv2.dnn.readNet(weights, cfg)
+        else:
+            self.logger.error("weights file not found")
+            sys.exit(1)
+        if _config['CUDA']:
+            self.net.setPreferableBackend(cv2.dnn.DNN_BACKEND_CUDA)
+            self.net.setPreferableTarget(cv2.dnn.DNN_TARGET_CUDA)
+        # The two captcha images differ in size, so two models with different
+        # input sizes are used; otherwise the recognition rate is very low
+        self.cpc_model = cv2.dnn_DetectionModel(self.net)
+        self.pcp_model = cv2.dnn_DetectionModel(self.net)
+        self.cpc_model.setInputParams(size=(320, 320), scale=1/255, swapRB=True)  # size must be a multiple of 32; larger is slower and not necessarily more accurate
+        self.pcp_model.setInputParams(size=(224, 128), scale=1/255, swapRB=True)  # size must be a multiple of 32
+
+    def base64_conversion(self, data):
+        """
+        Convert a base64 data URL to an OpenCV Mat
+        :param data: base64-encoded image, optionally with a data-URL prefix
+        :return: decoded BGR image
+        """
+        imgData = base64.b64decode(data.replace("data:image/jpg;base64,", ""))
+        nparr = np.frombuffer(imgData, np.uint8)
+        return cv2.imdecode(nparr, cv2.IMREAD_COLOR)
+
+    def identify(self, cpc, pcp):
+        """
+        Recognize the captcha and return the coordinates to click
+        :param cpc: the large background image
+        :param pcp: the small hint image
+        :return: (success, (x, y))
+        """
+        try:
+            cpc_classes, cpc_scores, cpc_boxes = self.cpc_model.detect(cpc, self.CONFIDENCE_THRESHOLD, self.NMS_THRESHOLD)
+            pcp_classes, pcp_scores, pcp_boxes = self.pcp_model.detect(pcp, self.CONFIDENCE_THRESHOLD, self.NMS_THRESHOLD)
+            if pcp_classes[0] in cpc_classes:  # is the class detected in the small image also present in the large one?
+                x1, y1, x2, y2 = cpc_boxes[cpc_classes.tolist().index(pcp_classes[0])]  # detect() boxes are (left, top, width, height), despite the names
+                if x2 - x1 < 200:  # guard against matching the background: discard results whose x difference is 200 or more
+                    r = (x1*2+x2)//2, (y1*2+y2)//2  # box center
+                    return True, r
+                else:
+                    return False, (None, None)
+            else:
+                return False, (None, None)
+        except Exception:  # detect() returns empty arrays when nothing is found, raising IndexError above
+            return False, (None, None)
+
+    def JDyolo(self, cpc_img_path_base64, pcp_show_picture_path_base64):
+        return self.identify(self.base64_conversion(cpc_img_path_base64), self.base64_conversion(pcp_show_picture_path_base64))
\ No newline at end of file
diff --git a/config.yaml b/config.yaml
index 2926e941..8f66d5b3 100644
--- a/config.yaml
+++ b/config.yaml
@@ -49,7 +49,7 @@ sms_captcha:
   aliyun_appcode: ""
 
 # image_captcha: image-captcha settings
-# image_captcha.type: image captcha recognizer type, one of: local, cjy, tj
+# image_captcha.type: image captcha recognizer type, one of: local, cjy, tj, yolov4
 # image_captcha.cjy_username: ChaoJiYing account, only needed when image_captcha.type is cjy
 # image_captcha.cjy_password: ChaoJiYing password, only needed when image_captcha.type is cjy
 # image_captcha.cjy_soft_id: ChaoJiYing software ID, only needed when image_captcha.type is cjy
@@ -57,6 +57,9 @@ sms_captcha:
 # image_captcha.tj_username: TuJian account, only needed when image_captcha.type is tj
 # image_captcha.tj_password: TuJian password, only needed when image_captcha.type is tj
 # image_captcha.tj_type_id: TuJian captcha type ID, only needed when image_captcha.type is tj; this project uses 19
+# yolov4_weights: path to the yolov4 weights file, only needed when image_captcha.type is yolov4
+# yolov4_cfg: path to the yolov4 config file, only needed when image_captcha.type is yolov4
+# CUDA: try CUDA acceleration (reportedly several to tens of times faster; requires OpenCV built with CUDA support), only needed when image_captcha.type is yolov4
 image_captcha:
   type: "local"
   cjy_username: ""
@@ -66,6 +69,9 @@ image_captcha:
   tj_username: ""
   tj_password: ""
   tj_type_id: 19
+  yolov4_weights: "yolov4/yolov4-custom.weights"
+  yolov4_cfg: "yolov4/yolov4-custom.cfg"
+  CUDA: false
 
 # user-agent, can be customized
 user-agent:
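For context on the arithmetic in `identify()`: `cv2.dnn_DetectionModel.detect` returns `(classIds, confidences, boxes)` with boxes as `(left, top, width, height)`, so `(x1*2+x2)//2` is `left + width//2`, i.e. the horizontal center of the box. A minimal standalone sketch of the same flow, without the project's config/logger plumbing; the weights/cfg paths are the config.yaml defaults and `cpc.jpg` is a hypothetical saved captcha screenshot:

```python
#!/usr/bin/env python3
# Minimal sketch of the detection flow used in captcha/jd_yolo_captcha.py.
# Paths are the defaults from config.yaml; cpc.jpg is a hypothetical sample.
import cv2

net = cv2.dnn.readNet("yolov4/yolov4-custom.weights", "yolov4/yolov4-custom.cfg")
model = cv2.dnn_DetectionModel(net)
model.setInputParams(size=(320, 320), scale=1 / 255, swapRB=True)

img = cv2.imread("cpc.jpg")
classes, scores, boxes = model.detect(img, 0.8, 0.01)  # (classIds, confidences, boxes)
for cls, score, (x, y, w, h) in zip(classes.flatten(), scores.flatten(), boxes):
    # boxes are (left, top, width, height); the click target is the box center
    print(f"class={int(cls)} score={float(score):.2f} center=({x + w // 2}, {y + h // 2})")
```

The class uses two `dnn_DetectionModel` instances over the same network only to feed different input sizes: the large background image and the small hint image have very different aspect ratios, and resizing both to one square input hurts accuracy.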
diff --git a/main.py b/main.py
index 28fc7942..6f66818b 100644
--- a/main.py
+++ b/main.py
@@ -10,6 +10,7 @@
 from captcha.chaojiying import ChaoJiYing
 from captcha.tujian import TuJian
 from captcha.jd_captcha import JDcaptcha_base64
+from captcha.jd_yolo_captcha import JDyolocaptcha
 from utils.logger import Log
 from utils.config import get_config
 from utils.selenium_browser import get_browser
@@ -93,6 +94,8 @@ def __init__(self):
             self.tj = TuJian(self.image_captcha_cfg)
         elif self.image_captcha_cfg["type"] == "local":
             pass
+        elif self.image_captcha_cfg["type"] == "yolov4":
+            self.JDyolo = JDyolocaptcha(self.image_captcha_cfg)
         else:
             WARN("please fill in image_captcha.type in config.yaml")
 
@@ -451,8 +454,12 @@ def local_auto_identify_captcha_click():
                 pcp_show_picture_path_base64 = self.wait.until(EC.presence_of_element_located(
                     (By.XPATH, '//*[@class="pcp_showPicture"]'))).get_attribute('src')
                 # recognize the captcha
-                INFO("recognizing with the local engine")
-                res = JDcaptcha_base64(cpc_img_path_base64, pcp_show_picture_path_base64)
+                if self.image_captcha_cfg["type"] == "local":
+                    INFO("recognizing with the local engine")
+                    res = JDcaptcha_base64(cpc_img_path_base64, pcp_show_picture_path_base64)
+                else:
+                    INFO("recognizing with the deep-learning engine")
+                    res = self.JDyolo.JDyolo(cpc_img_path_base64, pcp_show_picture_path_base64)
                 if res[0]:
                     ActionChains(self.browser).move_to_element_with_offset(
                         cpc_img, int(res[1][0] * zoom),
@@ -475,7 +482,7 @@ def local_auto_identify_captcha_click():
             return False
 
         # recognize and click; if it fails once, try one more time, then skip
-        if self.image_captcha_cfg["type"] == "local":
+        if self.image_captcha_cfg["type"] in ["local", "yolov4"]:
             if not local_auto_identify_captcha_click():
                 INFO("clicked the wrong captcha position, trying once more")
                 local_auto_identify_captcha_click()
diff --git a/requirements.txt b/requirements.txt
index 7a71ae04..81799eb1 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -7,4 +7,5 @@ Pillow~=8.2.0
 numpy~=1.20.3
 urllib3~=1.26.5
 baidu-aip==2.2.18.0
-websockets~=9.1
\ No newline at end of file
+websockets~=9.1
+opencv_python~=4.5.2.54
\ No newline at end of file
diff --git a/yolov4/yolov4-custom.cfg b/yolov4/yolov4-custom.cfg
new file mode 100644
index 00000000..21fbfd3a
--- /dev/null
+++ b/yolov4/yolov4-custom.cfg
@@ -0,0 +1,1164 @@
+[net]
+# Testing
+#batch=1
+#subdivisions=1
+# Training
+batch=64
+subdivisions=32
+width=352
+height=352
+channels=3
+momentum=0.949
+decay=0.0005
+angle=0
+saturation = 1.5
+exposure = 1.5
+hue=.1
+flip=1
+#use_cuda_graph = 1
+
+# CUDNN_HALF
+loss_scale=128
+
+learning_rate=0.001
+burn_in=1000
+max_batches = 100000
+policy=steps
+steps=80000,90000
+scales=.1,.1
+
+#cutmix=1
+mosaic=1
+
+#:104x104 54:52x52 85:26x26 104:13x13 for 416
+
+[convolutional]
+batch_normalize=1
+filters=32
+size=3
+stride=1
+pad=1
+activation=mish
+
+# Downsample
+
+[convolutional]
+batch_normalize=1
+filters=64
+size=3
+stride=2
+pad=1
+activation=mish
+
+[convolutional]
+batch_normalize=1
+filters=64
+size=1
+stride=1
+pad=1
+activation=mish
+
+[route]
+layers = -2
+
+[convolutional]
+batch_normalize=1
+filters=64
+size=1
+stride=1
+pad=1
+activation=mish
+
+[convolutional]
+batch_normalize=1
+filters=32
+size=1
+stride=1
+pad=1
+activation=mish
+
+[convolutional]
+batch_normalize=1
+filters=64
+size=3
+stride=1
+pad=1
+activation=mish
+
+[shortcut]
+from=-3
+activation=linear
+
+[convolutional]
+batch_normalize=1
+filters=64
+size=1
+stride=1
+pad=1
+activation=mish
+
+[route]
+layers = -1,-7
+
+[convolutional]
+batch_normalize=1
+filters=64
+size=1
+stride=1
+pad=1
+activation=mish
+
+# Downsample
+
+[convolutional]
+batch_normalize=1
+filters=128
+size=3
+stride=2
+pad=1
+activation=mish
+
+[convolutional]
+batch_normalize=1
+filters=64
+size=1
+stride=1
+pad=1
+activation=mish
+
+[route]
+layers = -2
+
+[convolutional]
+batch_normalize=1
+filters=64
+size=1
+stride=1
+pad=1
+activation=mish
+
+[convolutional]
+batch_normalize=1
+filters=64
+size=1
+stride=1
+pad=1
+activation=mish
+
+[convolutional]
+batch_normalize=1
+filters=64
+size=3
+stride=1
+pad=1
+activation=mish
+
+[shortcut]
+from=-3
+activation=linear
+
+[convolutional]
+batch_normalize=1
+filters=64
+size=1
+stride=1
+pad=1
+activation=mish
+
+[convolutional]
+batch_normalize=1
+filters=64
+size=3
+stride=1
+pad=1
+activation=mish
+
+[shortcut]
+from=-3
+activation=linear
+
+[convolutional]
+batch_normalize=1
+filters=64
+size=1
+stride=1
+pad=1
+activation=mish
+
+[route]
+layers = 
-1,-10 + +[convolutional] +batch_normalize=1 +filters=128 +size=1 +stride=1 +pad=1 +activation=mish + +# Downsample + +[convolutional] +batch_normalize=1 +filters=256 +size=3 +stride=2 +pad=1 +activation=mish + +[convolutional] +batch_normalize=1 +filters=128 +size=1 +stride=1 +pad=1 +activation=mish + +[route] +layers = -2 + +[convolutional] +batch_normalize=1 +filters=128 +size=1 +stride=1 +pad=1 +activation=mish + +[convolutional] +batch_normalize=1 +filters=128 +size=1 +stride=1 +pad=1 +activation=mish + +[convolutional] +batch_normalize=1 +filters=128 +size=3 +stride=1 +pad=1 +activation=mish + +[shortcut] +from=-3 +activation=linear + +[convolutional] +batch_normalize=1 +filters=128 +size=1 +stride=1 +pad=1 +activation=mish + +[convolutional] +batch_normalize=1 +filters=128 +size=3 +stride=1 +pad=1 +activation=mish + +[shortcut] +from=-3 +activation=linear + +[convolutional] +batch_normalize=1 +filters=128 +size=1 +stride=1 +pad=1 +activation=mish + +[convolutional] +batch_normalize=1 +filters=128 +size=3 +stride=1 +pad=1 +activation=mish + +[shortcut] +from=-3 +activation=linear + +[convolutional] +batch_normalize=1 +filters=128 +size=1 +stride=1 +pad=1 +activation=mish + +[convolutional] +batch_normalize=1 +filters=128 +size=3 +stride=1 +pad=1 +activation=mish + +[shortcut] +from=-3 +activation=linear + + +[convolutional] +batch_normalize=1 +filters=128 +size=1 +stride=1 +pad=1 +activation=mish + +[convolutional] +batch_normalize=1 +filters=128 +size=3 +stride=1 +pad=1 +activation=mish + +[shortcut] +from=-3 +activation=linear + +[convolutional] +batch_normalize=1 +filters=128 +size=1 +stride=1 +pad=1 +activation=mish + +[convolutional] +batch_normalize=1 +filters=128 +size=3 +stride=1 +pad=1 +activation=mish + +[shortcut] +from=-3 +activation=linear + +[convolutional] +batch_normalize=1 +filters=128 +size=1 +stride=1 +pad=1 +activation=mish + +[convolutional] +batch_normalize=1 +filters=128 +size=3 +stride=1 +pad=1 +activation=mish + +[shortcut] +from=-3 +activation=linear + +[convolutional] +batch_normalize=1 +filters=128 +size=1 +stride=1 +pad=1 +activation=mish + +[convolutional] +batch_normalize=1 +filters=128 +size=3 +stride=1 +pad=1 +activation=mish + +[shortcut] +from=-3 +activation=linear + +[convolutional] +batch_normalize=1 +filters=128 +size=1 +stride=1 +pad=1 +activation=mish + +[route] +layers = -1,-28 + +[convolutional] +batch_normalize=1 +filters=256 +size=1 +stride=1 +pad=1 +activation=mish + +# Downsample + +[convolutional] +batch_normalize=1 +filters=512 +size=3 +stride=2 +pad=1 +activation=mish + +[convolutional] +batch_normalize=1 +filters=256 +size=1 +stride=1 +pad=1 +activation=mish + +[route] +layers = -2 + +[convolutional] +batch_normalize=1 +filters=256 +size=1 +stride=1 +pad=1 +activation=mish + +[convolutional] +batch_normalize=1 +filters=256 +size=1 +stride=1 +pad=1 +activation=mish + +[convolutional] +batch_normalize=1 +filters=256 +size=3 +stride=1 +pad=1 +activation=mish + +[shortcut] +from=-3 +activation=linear + + +[convolutional] +batch_normalize=1 +filters=256 +size=1 +stride=1 +pad=1 +activation=mish + +[convolutional] +batch_normalize=1 +filters=256 +size=3 +stride=1 +pad=1 +activation=mish + +[shortcut] +from=-3 +activation=linear + + +[convolutional] +batch_normalize=1 +filters=256 +size=1 +stride=1 +pad=1 +activation=mish + +[convolutional] +batch_normalize=1 +filters=256 +size=3 +stride=1 +pad=1 +activation=mish + +[shortcut] +from=-3 +activation=linear + + +[convolutional] +batch_normalize=1 +filters=256 +size=1 +stride=1 +pad=1 
+activation=mish + +[convolutional] +batch_normalize=1 +filters=256 +size=3 +stride=1 +pad=1 +activation=mish + +[shortcut] +from=-3 +activation=linear + + +[convolutional] +batch_normalize=1 +filters=256 +size=1 +stride=1 +pad=1 +activation=mish + +[convolutional] +batch_normalize=1 +filters=256 +size=3 +stride=1 +pad=1 +activation=mish + +[shortcut] +from=-3 +activation=linear + + +[convolutional] +batch_normalize=1 +filters=256 +size=1 +stride=1 +pad=1 +activation=mish + +[convolutional] +batch_normalize=1 +filters=256 +size=3 +stride=1 +pad=1 +activation=mish + +[shortcut] +from=-3 +activation=linear + + +[convolutional] +batch_normalize=1 +filters=256 +size=1 +stride=1 +pad=1 +activation=mish + +[convolutional] +batch_normalize=1 +filters=256 +size=3 +stride=1 +pad=1 +activation=mish + +[shortcut] +from=-3 +activation=linear + +[convolutional] +batch_normalize=1 +filters=256 +size=1 +stride=1 +pad=1 +activation=mish + +[convolutional] +batch_normalize=1 +filters=256 +size=3 +stride=1 +pad=1 +activation=mish + +[shortcut] +from=-3 +activation=linear + +[convolutional] +batch_normalize=1 +filters=256 +size=1 +stride=1 +pad=1 +activation=mish + +[route] +layers = -1,-28 + +[convolutional] +batch_normalize=1 +filters=512 +size=1 +stride=1 +pad=1 +activation=mish + +# Downsample + +[convolutional] +batch_normalize=1 +filters=1024 +size=3 +stride=2 +pad=1 +activation=mish + +[convolutional] +batch_normalize=1 +filters=512 +size=1 +stride=1 +pad=1 +activation=mish + +[route] +layers = -2 + +[convolutional] +batch_normalize=1 +filters=512 +size=1 +stride=1 +pad=1 +activation=mish + +[convolutional] +batch_normalize=1 +filters=512 +size=1 +stride=1 +pad=1 +activation=mish + +[convolutional] +batch_normalize=1 +filters=512 +size=3 +stride=1 +pad=1 +activation=mish + +[shortcut] +from=-3 +activation=linear + +[convolutional] +batch_normalize=1 +filters=512 +size=1 +stride=1 +pad=1 +activation=mish + +[convolutional] +batch_normalize=1 +filters=512 +size=3 +stride=1 +pad=1 +activation=mish + +[shortcut] +from=-3 +activation=linear + +[convolutional] +batch_normalize=1 +filters=512 +size=1 +stride=1 +pad=1 +activation=mish + +[convolutional] +batch_normalize=1 +filters=512 +size=3 +stride=1 +pad=1 +activation=mish + +[shortcut] +from=-3 +activation=linear + +[convolutional] +batch_normalize=1 +filters=512 +size=1 +stride=1 +pad=1 +activation=mish + +[convolutional] +batch_normalize=1 +filters=512 +size=3 +stride=1 +pad=1 +activation=mish + +[shortcut] +from=-3 +activation=linear + +[convolutional] +batch_normalize=1 +filters=512 +size=1 +stride=1 +pad=1 +activation=mish + +[route] +layers = -1,-16 + +[convolutional] +batch_normalize=1 +filters=1024 +size=1 +stride=1 +pad=1 +activation=mish +stopbackward=800 + +########################## + +[convolutional] +batch_normalize=1 +filters=512 +size=1 +stride=1 +pad=1 +activation=leaky + +[convolutional] +batch_normalize=1 +size=3 +stride=1 +pad=1 +filters=1024 +activation=leaky + +[convolutional] +batch_normalize=1 +filters=512 +size=1 +stride=1 +pad=1 +activation=leaky + +### SPP ### +[maxpool] +stride=1 +size=5 + +[route] +layers=-2 + +[maxpool] +stride=1 +size=9 + +[route] +layers=-4 + +[maxpool] +stride=1 +size=13 + +[route] +layers=-1,-3,-5,-6 +### End SPP ### + +[convolutional] +batch_normalize=1 +filters=512 +size=1 +stride=1 +pad=1 +activation=leaky + +[convolutional] +batch_normalize=1 +size=3 +stride=1 +pad=1 +filters=1024 +activation=leaky + +[convolutional] +batch_normalize=1 +filters=512 +size=1 +stride=1 +pad=1 +activation=leaky + 
+[convolutional] +batch_normalize=1 +filters=256 +size=1 +stride=1 +pad=1 +activation=leaky + +[upsample] +stride=2 + +[route] +layers = 85 + +[convolutional] +batch_normalize=1 +filters=256 +size=1 +stride=1 +pad=1 +activation=leaky + +[route] +layers = -1, -3 + +[convolutional] +batch_normalize=1 +filters=256 +size=1 +stride=1 +pad=1 +activation=leaky + +[convolutional] +batch_normalize=1 +size=3 +stride=1 +pad=1 +filters=512 +activation=leaky + +[convolutional] +batch_normalize=1 +filters=256 +size=1 +stride=1 +pad=1 +activation=leaky + +[convolutional] +batch_normalize=1 +size=3 +stride=1 +pad=1 +filters=512 +activation=leaky + +[convolutional] +batch_normalize=1 +filters=256 +size=1 +stride=1 +pad=1 +activation=leaky + +[convolutional] +batch_normalize=1 +filters=128 +size=1 +stride=1 +pad=1 +activation=leaky + +[upsample] +stride=2 + +[route] +layers = 54 + +[convolutional] +batch_normalize=1 +filters=128 +size=1 +stride=1 +pad=1 +activation=leaky + +[route] +layers = -1, -3 + +[convolutional] +batch_normalize=1 +filters=128 +size=1 +stride=1 +pad=1 +activation=leaky + +[convolutional] +batch_normalize=1 +size=3 +stride=1 +pad=1 +filters=256 +activation=leaky + +[convolutional] +batch_normalize=1 +filters=128 +size=1 +stride=1 +pad=1 +activation=leaky + +[convolutional] +batch_normalize=1 +size=3 +stride=1 +pad=1 +filters=256 +activation=leaky + +[convolutional] +batch_normalize=1 +filters=128 +size=1 +stride=1 +pad=1 +activation=leaky + +########################## + +[convolutional] +batch_normalize=1 +size=3 +stride=1 +pad=1 +filters=256 +activation=leaky + +[convolutional] +size=1 +stride=1 +pad=1 +filters=495 +activation=linear + + +[yolo] +mask = 0,1,2 +anchors = 12, 16, 19, 36, 40, 28, 36, 75, 76, 55, 72, 146, 142, 110, 192, 243, 459, 401 +classes=160 +num=9 +jitter=.3 +ignore_thresh = .7 +truth_thresh = 1 +scale_x_y = 1.2 +iou_thresh=0.213 +cls_normalizer=1.0 +iou_normalizer=0.07 +iou_loss=ciou +nms_kind=greedynms +beta_nms=0.6 +max_delta=5 + + +[route] +layers = -4 + +[convolutional] +batch_normalize=1 +size=3 +stride=2 +pad=1 +filters=256 +activation=leaky + +[route] +layers = -1, -16 + +[convolutional] +batch_normalize=1 +filters=256 +size=1 +stride=1 +pad=1 +activation=leaky + +[convolutional] +batch_normalize=1 +size=3 +stride=1 +pad=1 +filters=512 +activation=leaky + +[convolutional] +batch_normalize=1 +filters=256 +size=1 +stride=1 +pad=1 +activation=leaky + +[convolutional] +batch_normalize=1 +size=3 +stride=1 +pad=1 +filters=512 +activation=leaky + +[convolutional] +batch_normalize=1 +filters=256 +size=1 +stride=1 +pad=1 +activation=leaky + +[convolutional] +batch_normalize=1 +size=3 +stride=1 +pad=1 +filters=512 +activation=leaky + +[convolutional] +size=1 +stride=1 +pad=1 +filters=495 +activation=linear + + +[yolo] +mask = 3,4,5 +anchors = 12, 16, 19, 36, 40, 28, 36, 75, 76, 55, 72, 146, 142, 110, 192, 243, 459, 401 +classes=160 +num=9 +jitter=.3 +ignore_thresh = .7 +truth_thresh = 1 +scale_x_y = 1.1 +iou_thresh=0.213 +cls_normalizer=1.0 +iou_normalizer=0.07 +iou_loss=ciou +nms_kind=greedynms +beta_nms=0.6 +max_delta=5 + + +[route] +layers = -4 + +[convolutional] +batch_normalize=1 +size=3 +stride=2 +pad=1 +filters=512 +activation=leaky + +[route] +layers = -1, -37 + +[convolutional] +batch_normalize=1 +filters=512 +size=1 +stride=1 +pad=1 +activation=leaky + +[convolutional] +batch_normalize=1 +size=3 +stride=1 +pad=1 +filters=1024 +activation=leaky + +[convolutional] +batch_normalize=1 +filters=512 +size=1 +stride=1 +pad=1 +activation=leaky + +[convolutional] 
+batch_normalize=1 +size=3 +stride=1 +pad=1 +filters=1024 +activation=leaky + +[convolutional] +batch_normalize=1 +filters=512 +size=1 +stride=1 +pad=1 +activation=leaky + +[convolutional] +batch_normalize=1 +size=3 +stride=1 +pad=1 +filters=1024 +activation=leaky + +[convolutional] +size=1 +stride=1 +pad=1 +filters=495 +activation=linear + + +[yolo] +mask = 6,7,8 +anchors = 12, 16, 19, 36, 40, 28, 36, 75, 76, 55, 72, 146, 142, 110, 192, 243, 459, 401 +classes=160 +num=9 +jitter=.3 +ignore_thresh = .7 +truth_thresh = 1 +random=1 +scale_x_y = 1.05 +iou_thresh=0.213 +cls_normalizer=1.0 +iou_normalizer=0.07 +iou_loss=ciou +nms_kind=greedynms +beta_nms=0.6 +max_delta=5
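
A note for anyone retraining these weights: each of the three `[yolo]` heads above uses `mask = 0,1,2` / `3,4,5` / `6,7,8` (three anchors per head) with `classes=160`, and the `[convolutional]` layer directly before each head has `filters=495`. That is darknet's standard rule `filters = (classes + 5) * anchors_per_head`, where the 5 covers the four box coordinates plus objectness. A quick sanity check with the numbers from this cfg:

```python
# Sanity-check the darknet rule tying the detection-head width to the class
# count: filters = (classes + 5) * anchors_per_head. Values are taken from
# yolov4/yolov4-custom.cfg above.
classes = 160          # classes= in each [yolo] section
anchors_per_head = 3   # mask = 0,1,2 / 3,4,5 / 6,7,8
filters = (classes + 5) * anchors_per_head  # 4 box coords + 1 objectness + classes
assert filters == 495  # matches filters=495 before every [yolo] layer
print(filters)
```

If the class count ever changes, both `classes=` in all three `[yolo]` sections and `filters=` in the three preceding `[convolutional]` sections must be updated together, or darknet/OpenCV will reject the config.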