Skip to content

Commit

Permalink
bump to 1.3, fix pylint & flake
Browse files Browse the repository at this point in the history
  • Loading branch information
andrew-ld committed Mar 17, 2021
1 parent 04356ee commit f869dd3
Show file tree
Hide file tree
Showing 5 changed files with 22 additions and 19 deletions.
2 changes: 1 addition & 1 deletion setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@

setuptools.setup(
name="smart_tv_telegram",
version="1.2.1.dev0",
version="1.3.0.dev0",
setup_requires=["wheel"],
author="andrew-ld",
author_email="[email protected]",
Expand Down
4 changes: 2 additions & 2 deletions smart_tv_telegram/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,8 +4,8 @@
from .bot import Bot


# Package version metadata (bumped to 1.3.0 in this commit).
__version__ = "1.3.0"
__version_info__ = ("1", "3", "0")
__author__ = "https://github.com/andrew-ld"


Expand Down
7 changes: 5 additions & 2 deletions smart_tv_telegram/__main__.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,12 +3,13 @@
import logging
import argparse
import os.path
import typing

from smart_tv_telegram import Http, Mtproto, Config, Bot
from smart_tv_telegram.devices import FINDERS


def open_config(parser: argparse.ArgumentParser, arg: str) -> Config:
def open_config(parser: argparse.ArgumentParser, arg: str) -> typing.Optional[Config]:
if not os.path.exists(arg):
parser.error(f"The file `{arg}` does not exist")

Expand All @@ -24,13 +25,15 @@ def open_config(parser: argparse.ArgumentParser, arg: str) -> Config:
except configparser.Error as err:
parser.error(f"generic configparser error:\n{str(err)}")

return None


async def async_main(config: Config):
finders = [f() for f in FINDERS if f.is_enabled(config)]
mtproto = Mtproto(config)
http = Http(mtproto, config, finders)
bot = Bot(mtproto, config, http, finders)
http.set_on_stram_closed_handler(bot.get_on_stream_closed())
http.set_on_stream_closed_handler(bot.get_on_stream_closed())
bot.prepare()

await mtproto.start()
Expand Down
4 changes: 2 additions & 2 deletions smart_tv_telegram/bot.py
Original file line number Diff line number Diff line change
Expand Up @@ -58,11 +58,11 @@ def __init__(self, mtproto: Mtproto, functions: typing.Dict[int, typing.Any]):
self._mtproto = mtproto
self._functions = functions

async def handle(self, remains: float, chat_id: int, message_id: int, local_token: int):
    """Notify the chat that a download was closed before completion.

    Drops the per-stream callback registered under *local_token* (if any)
    and replies to the originating message with the percentage that was
    left undownloaded.
    """
    # Forget the per-stream function so it cannot fire after close.
    if local_token in self._functions:
        del self._functions[local_token]

    await self._mtproto.reply_message(message_id, chat_id, f"download closed, {remains:0.2f}% remains")


class TelegramStateMachine:
Expand Down
24 changes: 12 additions & 12 deletions smart_tv_telegram/http.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@

class OnStreamClosed(abc.ABC):
    """Callback interface invoked when a media stream is closed.

    Implementations receive *remains* — the percentage of the file left
    undownloaded at close time — plus the chat/message identifiers and
    the per-stream *local_token*.
    """

    @abc.abstractmethod
    async def handle(self, remains: float, chat_id: int, message_id: int, local_token: int):
        raise NotImplementedError


Expand All @@ -36,7 +36,7 @@ class Http:
_tokens: typing.Set[int]
_downloaded_blocks: typing.Dict[int, typing.Set[int]]
_stream_debounce: typing.Dict[int, AsyncDebounce]
_stream_trasports: typing.Dict[int, typing.Set[asyncio.Transport]]
_stream_transports: typing.Dict[int, typing.Set[asyncio.Transport]]

def __init__(self, mtproto: Mtproto, config: Config, finders: typing.List[DeviceFinder]):
self._mtproto = mtproto
Expand All @@ -46,9 +46,9 @@ def __init__(self, mtproto: Mtproto, config: Config, finders: typing.List[Device
self._tokens = set()
self._downloaded_blocks = dict()
self._stream_debounce = dict()
self._stream_trasports = dict()
self._stream_transports = dict()

def set_on_stream_closed_handler(self, handler: OnStreamClosed):
    """Register the OnStreamClosed callback fired on stream teardown."""
    self._on_stream_closed = handler

async def start(self):
Expand Down Expand Up @@ -125,12 +125,12 @@ def _feed_downloaded_blocks(self, block_id: int, local_token: int):
downloaded_blocks = self._downloaded_blocks.setdefault(local_token, set())
downloaded_blocks.add(block_id)

def _feed_stream_trasport(self, local_token: int, transport: asyncio.Transport):
transports = self._stream_trasports.setdefault(local_token, set())
def _feed_stream_transport(self, local_token: int, transport: asyncio.Transport):
transports = self._stream_transports.setdefault(local_token, set())
transports.add(transport)

def _get_stream_transports(self, local_token: int) -> typing.Set[asyncio.Transport]:
return self._stream_trasports[local_token] if local_token in self._stream_trasports else set()
return self._stream_transports[local_token] if local_token in self._stream_transports else set()

async def _timeout_handler(self, message_id: int, chat_id: int, local_token: int, size: int):
_debounce: typing.Optional[AsyncDebounce] = None # avoid garbage collector
Expand All @@ -151,14 +151,14 @@ async def _timeout_handler(self, message_id: int, chat_id: int, local_token: int
_debounce = self._stream_debounce[local_token]
del self._stream_debounce[local_token]

if local_token in self._stream_trasports:
del self._stream_trasports[local_token]
if local_token in self._stream_transports:
del self._stream_transports[local_token]

remain_blocks_percentual = remain_blocks / blocks * 100
remain_blocks_perceptual = remain_blocks / blocks * 100
on_stream_closed = self._on_stream_closed

if isinstance(on_stream_closed, OnStreamClosed):
await on_stream_closed.handle(remain_blocks_percentual, chat_id, message_id, local_token)
await on_stream_closed.handle(remain_blocks_perceptual, chat_id, message_id, local_token)

if local_token in self._stream_debounce:
self._stream_debounce[local_token].reschedule()
Expand Down Expand Up @@ -253,7 +253,7 @@ async def _stream_handler(self, request: Request) -> typing.Optional[Response]:
if request.transport is None:
break

self._feed_stream_trasport(local_token, request.transport)
self._feed_stream_transport(local_token, request.transport)

if request.transport.is_closing():
break
Expand Down

0 comments on commit f869dd3

Please sign in to comment.