Handling JSONDecodeError

This commit is contained in:
taizan-hokuto
2020-02-11 11:26:29 +09:00
parent abb7565e3a
commit 339d04ad75
3 changed files with 41 additions and 15 deletions

View File

@@ -58,9 +58,18 @@ def ready_blocks(video_id, duration, div, callback):
video_id, seektime = seektime)
url = f"{REPLAY_URL}{quote(continuation)}&pbj=1"
async with session.get(url, headers = headers) as resp:
text = await resp.text()
next_continuation, actions = parser.parse(json.loads(text))
for _ in range(3):
try:
async with session.get(url, headers = headers) as resp:
text = await resp.text()
next_continuation, actions = parser.parse(json.loads(text))
except json.JSONDecodeError:
print("JSONDecodeError occurred")
await asyncio.sleep(1)
continue
break
else:
raise json.JSONDecodeError
if actions:
first = parser.get_offset(actions[0])
last = parser.get_offset(actions[-1])
@@ -97,8 +106,17 @@ def download_chunk(callback, blocks, video_id):
async def _fetch(continuation,session):
url = f"{REPLAY_URL}{quote(continuation)}&pbj=1"
async with session.get(url,headers = config.headers) as resp:
chat_json = await resp.text()
for _ in range(3):
try:
async with session.get(url,headers = config.headers) as resp:
chat_json = await resp.text()
except json.JSONDecodeError:
print("JSONDecodeError occurred")
await asyncio.sleep(1)
continue
break
else:
raise json.JSONDecodeError
continuation, actions = parser.parse(json.loads(chat_json))
if actions:
last = parser.get_offset(actions[-1])

View File

@@ -11,7 +11,7 @@ class DownloadWorker:
download function of asyncdl
block : Block :
Block object that includes chat_data
Block object associated with this worker
blocks : list :
List of Block(s)
@@ -19,7 +19,7 @@ class DownloadWorker:
video_id : str :
source_block : Block :
the block from which current downloading block is splitted
the Block from which the current downloading block was split
"""
__slots__ = ['fetch', 'block', 'blocks', 'video_id', 'source_block']
@@ -54,7 +54,7 @@ class DownloadWorker:
def get_new_block(worker) -> Block:
worker.block.done = True
index,undone_block = get_undone_block(worker.blocks)
index,undone_block = search_undone_block(worker.blocks)
if undone_block is None:
return Block(continuation = None)
mean = (undone_block.end + undone_block.last)/2
@@ -70,7 +70,16 @@ def get_new_block(worker) -> Block:
worker.blocks.insert(index+1,new_block)
return new_block
def get_undone_block(blocks) -> (int, Block):
def search_undone_block(blocks) -> (int, Block):
"""
Returns
--------
ret_index : int :
index of the Block in blocks whose download is not completed.
ret_block : Block :
the Block whose download is not completed.
"""
max_remaining = 0
ret_block = None
ret_index = 0
@@ -85,7 +94,7 @@ def get_undone_block(blocks) -> (int, Block):
return ret_index, ret_block
def top_cut(chats, last) -> list:
for i,chat in enumerate(chats):
for i, chat in enumerate(chats):
if parser.get_offset(chat) > last:
return chats[i:]
return []
@@ -111,7 +120,7 @@ def after_dividing_process(source_block, block, chats, new_cont,
if fetched_first < source_block.last:
chats = top_cut(chats, source_block.last)
block.first = source_block.last
if block.end<fetched_last:
if block.end < fetched_last:
chats = bottom_cut(chats, block.end)
block.last = block.end
continuation = None
@@ -139,6 +148,6 @@ def _cut(block, chats, cont, fetched_last):
line_offset = parser.get_offset(line)
if line_offset >= block.end:
block.last = line_offset
block.remaining=0
block.done=True
block.remaining = 0
block.done = True
return chats[:i], None

View File

@@ -59,7 +59,7 @@ def check_duplicate_offset(chatdata):
print("creating table...")
create_table(chatdata,max_range)
print("searching duplicate data...")
print("searching duplicate offset data...")
return [{
"index" : i, "id" : tbl_id[i],
@@ -121,7 +121,6 @@ def duplicate_tail(blocks):
ret = [blocks[i] for i in range(0,len(blocks))
if i == 0 or not is_duplicate_tail(i) ]
#ret.append(blocks[-1])
return ret
def overwrap(blocks):