Delete unnecessary lines

taizan-hokuto
2020-02-02 12:07:56 +09:00
parent f1d8393971
commit e8510f1116
5 changed files with 72 additions and 39 deletions

View File

@@ -13,9 +13,6 @@ headers = config.headers
REPLAY_URL = "https://www.youtube.com/live_chat_replay/" \
             "get_live_chat_replay?continuation="

def ready_blocks(video_id, duration, div, callback):
    if div <= 0: raise ValueError

def _divide(start, end, count):
    min_interval = 120
    if (not isinstance(start, int) or
@@ -36,6 +33,9 @@ def ready_blocks(video_id, duration, div, callback):
    return sorted(list(set([int(start+interval*j)
                            for j in range(count)])))

def ready_blocks(video_id, duration, div, callback):
    if div <= 0: raise ValueError

    async def _get_blocks(video_id, duration, div, callback):
        async with aiohttp.ClientSession() as session:
            futures = [_create_block(session, video_id, pos, seektime, callback)
@@ -70,7 +70,7 @@ def ready_blocks(video_id, duration, div, callback):
def download_chunk(callback, blocks):
    async def _dl_distribute():
    async def _allocate_workers():
        workers = [
            DownloadWorker(
                fetch = _fetch,
@@ -85,8 +85,8 @@ def download_chunk(callback, blocks):
    async def _fetch(continuation, session):
        url = f"{REPLAY_URL}{quote(continuation)}&pbj=1"
        async with session.get(url, headers=config.headers) as resp:
            text = await resp.text()
        continuation, actions = parser.parse(json.loads(text))
            chat_json = await resp.text()
        continuation, actions = parser.parse(json.loads(chat_json))
        if actions:
            last = parser.get_offset(actions[-1])
            first = parser.get_offset(actions[0])
@@ -96,5 +96,4 @@ def download_chunk(callback, blocks):
            return continuation, [], None
    loop = asyncio.get_event_loop()
    loop.run_until_complete(
        _dl_distribute())
    loop.run_until_complete(_allocate_workers())
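
For orientation, the renamed _allocate_workers entry point follows a common fan-out pattern: one DownloadWorker per Block, all sharing a single aiohttp session and awaited together. A minimal sketch of that pattern, assuming a hypothetical download_blocks() wrapper (the import path and wiring are illustrative, not the module's actual code):

import asyncio
import aiohttp
from dlworker import DownloadWorker  # import path assumed for illustration

def download_blocks(fetch, blocks):
    async def _allocate_workers():
        # one worker per block, all sharing a single HTTP session
        workers = [DownloadWorker(fetch=fetch, block=block) for block in blocks]
        async with aiohttp.ClientSession() as session:
            await asyncio.gather(*(worker.run(session) for worker in workers))

    loop = asyncio.get_event_loop()
    loop.run_until_complete(_allocate_workers())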

View File

@@ -1,4 +1,30 @@
class Block:
"""Block object represents virtual chunk of chatdata.
Parameter:
---------
pos : int
index of this block on block list.
first : int
videoOffsetTimeMs of chat_data[0]
last : int
videoOffsetTimeMs of the last chat_data current read.
(chat_data[-1])
this value increases as fetching chatdata progresses.
temp_last : int
temporary videoOffsetTimeMs of last chat data,
equals to first videoOffsetTimeMs of next block.
when download worker reaches this offset, the download will stop.
continuation : str
continuation param of last chat data.
chat_data : List
"""
def __init__(self, pos=0, first=0, last=0,
continuation='', chat_data=[]):
self.pos = pos
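
One detail in the signature above is worth flagging: chat_data=[] is a mutable default argument, so every Block created without an explicit list shares the same underlying list object. The commit keeps that signature; the sketch below only illustrates the conventional workaround and is not part of the change.

class Block:
    def __init__(self, pos=0, first=0, last=0, continuation='', chat_data=None):
        self.pos = pos
        self.first = first
        self.last = last
        self.continuation = continuation
        # a fresh list per instance instead of one list shared by all Blocks
        self.chat_data = chat_data if chat_data is not None else []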

View File

@@ -1,23 +1,37 @@
from . import parser
class DownloadWorker:
"""
DownloadWorker : associates a download session with a block.
Parameter
----------
fetch : func
download function of asyncdl
block : Block
chunk of chat_data
"""
def __init__(self, fetch, block):
self.block = block
self.fetch = fetch
async def run(self, session):
"""Remove extra chats just after ready_blocks(). """
temp_last = self.block.temp_last
self.block.chat_data, continuation = self.cut(
self.block.chat_data,
self.block.continuation,
self.block.last,
temp_last )
"""download loop """
while continuation:
data, cont, fetched_last = await self.fetch(continuation, session)
data, continuation = self.cut(data, cont, fetched_last, temp_last)
self.block.chat_data.extend(data)
def cut(self, data, cont, fetched_last, temp_last):
"""Remove extra chats."""
if fetched_last < temp_last or temp_last == -1:
return data, cont
for i, line in enumerate(data):
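
The hunk ends inside cut(). Based on the docstrings above (chats at or beyond temp_last belong to the next block), a plausible completion of the loop is sketched below, using the parser.get_offset helper seen in the first file; the exact comparison and return values are assumptions, not the committed code.

# sketch of a DownloadWorker method, shown unindented for brevity
def cut(self, data, cont, fetched_last, temp_last):
    """Remove extra chats."""
    if fetched_last < temp_last or temp_last == -1:
        return data, cont
    for i, line in enumerate(data):
        if parser.get_offset(line) >= temp_last:
            # stop before the next block's range; no continuation ends the loop
            return data[:i], None
    return data, cont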

View File

@@ -1,8 +1,3 @@
import asyncio
import aiohttp
import json
import traceback
from urllib.parse import quote
from . import asyncdl
from . import parser
from . import videoinfo
@@ -10,7 +5,6 @@ from . block import Block
from . duplcheck import duplicate_head, duplicate_tail, overwrap
from .. import config
from .. exceptions import InvalidVideoIdException
from .. paramgen import arcparam
logger = config.logger(__name__)
headers=config.headers
@@ -20,8 +14,8 @@ class Downloader:
        self.video_id = video_id
        self.duration = duration
        self.div = div
        self.blocks = []
        self.callback = callback
        self.blocks = []

    def ready_blocks(self):
        result = asyncdl.ready_blocks(
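
The last hunk also ends mid-call. A completion consistent with the asyncdl.ready_blocks(video_id, duration, div, callback) signature shown in the first file might look like the sketch below; the None filtering and the return self chaining are illustrative assumptions, not the committed code.

    def ready_blocks(self):
        result = asyncdl.ready_blocks(
            self.video_id, self.duration, self.div, self.callback)
        # keep only blocks that were actually created (assumption)
        self.blocks = [block for block in result if block is not None]
        return self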