
Commit

Added tests
dtrenkenshu authored and dtrenkenshu committed Aug 15, 2017
1 parent f6097b2 commit a4c3409
Showing 12 changed files with 342 additions and 55 deletions.
11 changes: 5 additions & 6 deletions openprocurement/bot/identification/databridge/bridge.py
@@ -82,8 +82,7 @@ def __init__(self, config):
# init queues for workers
self.filtered_tender_ids_queue = Queue(maxsize=buffers_size) # queue of tender IDs with appropriate status
self.edrpou_codes_queue = Queue(maxsize=buffers_size) # queue with edrpou codes (Data objects stored in it)
self.upload_to_doc_service_queue = Queue(
maxsize=buffers_size) # queue with detailed info from EDR (Data.file_content)
self.upload_to_doc_service_queue = Queue(maxsize=buffers_size) # queue with info from EDR (Data.file_content)
# upload_to_tender_queue - queue with file's get_url
self.upload_to_tender_queue = Queue(maxsize=buffers_size)

@@ -231,11 +230,11 @@ def run(self):
self.jobs['edr_handler'].retry_edrpou_codes_queue.qsize() if self.jobs[
'edr_handler'] else 0,
self.upload_to_doc_service_queue.qsize(),
self.jobs['upload_file'].retry_upload_to_doc_service_queue.qsize() if self.jobs[
'upload_file'] else 0,
self.jobs['upload_file_to_doc_service'].retry_upload_to_doc_service_queue.qsize() if self.jobs[
'upload_file_to_doc_service'] else 0,
self.upload_to_tender_queue.qsize(),
self.jobs['upload_file'].retry_upload_to_tender_queue.qsize() if self.jobs[
'upload_file'] else 0
self.jobs['upload_file_to_tender'].retry_upload_to_tender_queue.qsize() if self.jobs[
'upload_file_to_tender'] else 0
))
counter += 1
for name, job in self.jobs.items():
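The first hunk above reflows the bounded queue setup onto one line; the second renames the job keys used when reporting retry-queue sizes. As a refresher, a minimal sketch (not part of the commit) of how a bounded gevent Queue gives the bridge backpressure between pipeline stages: a producer blocks, or fails fast with block=False, once maxsize items are waiting.

from gevent.queue import Queue, Full

buffers_size = 2                          # stand-in for the configured buffer size
q = Queue(maxsize=buffers_size)
q.put("edrpou code 1")
q.put("edrpou code 2")
try:
    q.put("edrpou code 3", block=False)   # buffer is full, raises gevent.queue.Full
except Full:
    print("queue full, producer should back off")
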
openprocurement/bot/identification/databridge/sleep_change_value.py
@@ -1,13 +1,14 @@
class APIRateController:
# coding=utf-8
class APIRateController(object):
def __init__(self, increment_step=1, decrement_step=1):
self.increment_step = increment_step
self.decrement_step = decrement_step
self.time_between_requests = 0

def decrement(self):
self.time_between_requests -= self.decrement_step if self.decrement_step < self.time_between_requests else 0
self.time_between_requests -= self.decrement_step if self.decrement_step <= self.time_between_requests else 0
return self.time_between_requests

def increment(self):
self.time_between_requests += self.increment_step
return self.time_between_requests
return self.time_between_requests
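
The comparison change from < to <= lets the request delay fall back to zero once it equals the decrement step, instead of sticking one step above it. A minimal usage sketch, not part of the commit:

rate = APIRateController(increment_step=1, decrement_step=1)
rate.increment()                  # time_between_requests: 0 -> 1
rate.decrement()                  # 1 <= 1, so the delay returns to 0 (the old < left it at 1)
assert rate.time_between_requests == 0
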
@@ -39,8 +39,6 @@ def __init__(self, upload_to_doc_service_queue, upload_to_tender_queue, process_
self.sleep_change_value = sleep_change_value
# retry queues for workers
self.retry_upload_to_doc_service_queue = Queue(maxsize=500)
self.retry_upload_to_tender_queue = Queue(maxsize=500)

# blockers
self.services_not_available = services_not_available

@@ -154,10 +152,10 @@ def _run(self):
def check_and_revive_jobs(self):
for name, job in self.immortal_jobs.items():
if job.dead:
# logger.warning("{} worker dead try restart".format(name), extra=journal_context(
# {"MESSAGE_ID": 'DATABRIDGE_RESTART_{}'.format(name.lower())}, {}))
logger.warning("{} worker dead try restart".format(name), extra=journal_context(
{"MESSAGE_ID": 'DATABRIDGE_RESTART_{}'.format(name.lower())}, {}))
self.immortal_jobs[name] = gevent.spawn(getattr(self, name))
# logger.info("{} worker is up".format(name))
logger.info("{} worker is up".format(name))

def shutdown(self):
self.exit = True
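This hunk re-enables the log lines around worker revival. For reference, a self-contained sketch of the underlying gevent pattern (flaky_worker is illustrative, not a name from the repository): a crashed greenlet is detected through its dead flag and simply respawned.

import gevent

def flaky_worker():
    raise RuntimeError("worker crashed")      # simulate an unexpected failure

job = gevent.spawn(flaky_worker)
gevent.sleep(0)                               # yield so the greenlet runs and dies
if job.dead:
    job = gevent.spawn(flaky_worker)          # respawn it, as check_and_revive_jobs does
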
@@ -1,6 +1,6 @@
# coding=utf-8
from munch import munchify
from gevent.queue import Queue
from gevent.queue import Queue, Empty
from retrying import retry

import logging.config
@@ -76,7 +76,7 @@ def try_upload_to_tender(self, tender_data, is_retry):
except Exception as e:
self.handle_error(e, tender_data, is_retry)
else:
self.succesfully_uploaded_to_tender(tender_data, is_retry)
self.successfully_uploaded_to_tender(tender_data, is_retry)

def update_headers_and_upload_to_tender(self, tender_data, is_retry):
if is_retry:
@@ -133,7 +133,7 @@ def handle_error(self, re, tender_data, is_retry):
self.retry_upload_to_tender_queue.put(tender_data)
self.upload_to_tender_queue.get()

def succesfully_uploaded_to_tender(self, tender_data, is_retry):
def successfully_uploaded_to_tender(self, tender_data, is_retry):
logger.info('Successfully uploaded file to {} doc_id: {}'.format(tender_data, tender_data.doc_id()),
extra=journal_context({"MESSAGE_ID": DATABRIDGE_SUCCESS_UPLOAD_TO_TENDER}, tender_data.log_params()))
# delete current tender after successful upload file (to avoid reloading file)
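The typo fix renames both the method definition and its call site in try_upload_to_tender. A minimal illustration (not from the commit) of why the success handler sits in the else clause: it runs only when the upload raised nothing, so a failed upload is never reported as a success.

def upload(fail):
    if fail:
        raise RuntimeError("doc service unavailable")

def process(fail):
    try:
        upload(fail)
    except RuntimeError as e:
        return "handle_error: {}".format(e)
    else:
        return "successfully uploaded"

assert process(fail=True).startswith("handle_error")
assert process(fail=False) == "successfully uploaded"
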
25 changes: 5 additions & 20 deletions openprocurement/bot/identification/tests/bridge.py
@@ -1,21 +1,18 @@
# -*- coding: utf-8 -*-

import unittest
import os

from openprocurement.bot.identification.tests.utils import custom_sleep
from requests import RequestException
import unittest

from mock import patch, MagicMock
from restkit import RequestError, ResourceError

from gevent.pywsgi import WSGIServer
from requests import RequestException
from bottle import Bottle, response, request
from restkit import RequestError

from openprocurement.bot.identification.databridge.bridge import EdrDataBridge
from openprocurement_client.client import TendersClientSync, TendersClient
from openprocurement.bot.identification.databridge.bridge import EdrDataBridge
from openprocurement.bot.identification.client import DocServiceClient, ProxyClient
from openprocurement.bot.identification.databridge.utils import check_412
from openprocurement.bot.identification.tests.utils import custom_sleep, AlmostAlwaysTrue

config = {
'main':
@@ -41,18 +38,6 @@
}


class AlmostAlwaysTrue(object):
def __init__(self, total_iterations=1):
self.total_iterations = total_iterations
self.current_iteration = 0

def __nonzero__(self):
if self.current_iteration < self.total_iterations:
self.current_iteration += 1
return bool(1)
return bool(0)


class BaseServersTest(unittest.TestCase):
"""Api server to test openprocurement.integrations.edr.databridge.bridge """

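The AlmostAlwaysTrue helper is removed from this test module in favour of the shared copy imported from tests/utils (see the updated import block above). A sketch of how such a helper is typically used under Python 2 (the while loop below is illustrative, not taken from the test suite): the object is truthy for a fixed number of checks and then falsy, so an otherwise endless worker loop runs a bounded number of iterations in a test.

class AlmostAlwaysTrue(object):
    def __init__(self, total_iterations=1):
        self.total_iterations = total_iterations
        self.current_iteration = 0

    def __nonzero__(self):                    # Python 2 truthiness hook
        if self.current_iteration < self.total_iterations:
            self.current_iteration += 1
            return True
        return False

iterations = 0
condition = AlmostAlwaysTrue(total_iterations=3)
while condition:
    iterations += 1
assert iterations == 3
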
3 changes: 1 addition & 2 deletions openprocurement/bot/identification/tests/edr_handler.py
@@ -715,9 +715,8 @@ def test_value_error(self, mrequest, gevent_sleep):
self.assertIsNotNone(mrequest.request_history[0].headers['X-Client-Request-ID'])
self.assertIsNotNone(mrequest.request_history[1].headers['X-Client-Request-ID'])

@requests_mock.Mocker()
@patch('gevent.sleep')
def test_value_error_mock(self, mrequest, gevent_sleep):
def test_value_error_mock(self, gevent_sleep):
"""Accept 'Gateway Timeout Error' while requesting /verify, then accept 200 status code."""
gevent_sleep.side_effect = custom_sleep
self.worker.retry_edr_ids_queue = MagicMock()
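The mock-based variant of the value-error test no longer registers a requests_mock.Mocker: the worker's retry queue is replaced with a MagicMock, so no HTTP traffic needs to be faked. A generic sketch of that kind of test double (names are illustrative, not the actual edr_handler fixtures):

from mock import MagicMock

retry_queue = MagicMock()
retry_queue.get.return_value = "canned Data object"
retry_queue.qsize.return_value = 1

assert retry_queue.get() == "canned Data object"   # the worker only ever sees the stub
assert not retry_queue.put.called                  # nothing was pushed back onto the queue
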
14 changes: 8 additions & 6 deletions openprocurement/bot/identification/tests/filter_tender.py
@@ -2,14 +2,9 @@
import uuid
import unittest
import datetime

from gevent.hub import LoopExit
from gevent.queue import Queue
from openprocurement.bot.identification.databridge.constants import author
from openprocurement.bot.identification.databridge.filter_tender import FilterTenders
from openprocurement.bot.identification.databridge.utils import Data, ProcessTracker, item_key
from openprocurement.bot.identification.tests.utils import custom_sleep, generate_request_id, ResponseMock
from openprocurement.bot.identification.databridge.bridge import TendersClientSync
from openprocurement.bot.identification.databridge.sleep_change_value import APIRateController
from mock import patch, MagicMock
from time import sleep
from munch import munchify
@@ -18,6 +13,13 @@
from bottle import Bottle, response
from simplejson import dumps

from openprocurement.bot.identification.databridge.constants import author
from openprocurement.bot.identification.databridge.filter_tender import FilterTenders
from openprocurement.bot.identification.databridge.utils import Data, ProcessTracker, item_key
from openprocurement.bot.identification.tests.utils import custom_sleep, generate_request_id, ResponseMock
from openprocurement.bot.identification.databridge.bridge import TendersClientSync
from openprocurement.bot.identification.databridge.sleep_change_value import APIRateController

SERVER_RESPONSE_FLAG = 0
SPORE_COOKIES = ("a7afc9b1fc79e640f2487ba48243ca071c07a823d27"
"8cf9b7adf0fae467a524747e3c6c6973262130fac2b"
3 changes: 1 addition & 2 deletions openprocurement/bot/identification/tests/scanner.py
@@ -1,6 +1,5 @@
# -*- coding: utf-8 -*-
from gevent import monkey
from openprocurement.bot.identification.databridge.utils import ProcessTracker

monkey.patch_all()

@@ -15,7 +14,7 @@

from openprocurement.bot.identification.databridge.scanner import Scanner
from openprocurement.bot.identification.tests.utils import custom_sleep

from openprocurement.bot.identification.databridge.utils import ProcessTracker
from openprocurement.bot.identification.databridge.sleep_change_value import APIRateController


