Use httpx

This commit is contained in:
taizan-hokuto
2020-08-30 22:16:58 +09:00
parent 8012e1d191
commit 95f975c93d
20 changed files with 259 additions and 354 deletions

View File

@@ -1,9 +1,10 @@
import logging
from . import mylogger
headers = {
'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/83.0.4103.106 Safari/537.36'}
'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/84.0.4147.135 Safari/537.36',
}
def logger(module_name: str, loglevel=logging.DEBUG):
    """Return a logger for *module_name* configured through mylogger.

    Parameters
    ----------
    module_name : str
        Name under which the logger is registered.
    loglevel :
        Level forwarded to ``mylogger.get_logger`` (defaults to
        ``logging.DEBUG``).
    """
    return mylogger.get_logger(module_name, loglevel=loglevel)

View File

@@ -1,13 +1,13 @@
import aiohttp
import asyncio
import httpx
import json
import signal
import time
import traceback
import urllib.parse
from aiohttp.client_exceptions import ClientConnectorError
from concurrent.futures import CancelledError
from asyncio import Queue
from concurrent.futures import CancelledError
from .buffer import Buffer
from ..parser.live import Parser
from .. import config
@@ -22,7 +22,7 @@ MAX_RETRY = 10
class LiveChatAsync:
'''asyncio(aiohttp)を利用してYouTubeのライブ配信のチャットデータを取得する。
'''asyncioを利用してYouTubeのライブ配信のチャットデータを取得する。
Parameter
---------
@@ -161,11 +161,11 @@ class LiveChatAsync:
parameter for next chat data
'''
try:
async with aiohttp.ClientSession() as session:
async with httpx.AsyncClient(http2=True) as client:
while(continuation and self._is_alive):
continuation = await self._check_pause(continuation)
contents = await self._get_contents(
continuation, session, headers)
continuation, client, headers)
metadata, chatdata = self._parser.parse(contents)
timeout = metadata['timeoutMs'] / 1000
@@ -210,7 +210,7 @@ class LiveChatAsync:
self._video_id, 3, self._topchat_only)
return continuation
async def _get_contents(self, continuation, session, headers):
async def _get_contents(self, continuation, client, headers):
'''Get 'continuationContents' from livechat json.
If contents is None at first fetching,
try to fetch archive chat data.
@@ -219,7 +219,7 @@ class LiveChatAsync:
-------
'continuationContents' which includes metadata & chatdata.
'''
livechat_json = await self._get_livechat_json(continuation, session, headers)
livechat_json = await self._get_livechat_json(continuation, client, headers)
contents = self._parser.get_contents(livechat_json)
if self._first_fetch:
if contents is None or self._is_replay:
@@ -229,18 +229,18 @@ class LiveChatAsync:
continuation = arcparam.getparam(
self._video_id, self.seektime, self._topchat_only)
livechat_json = (await self._get_livechat_json(
continuation, session, headers))
continuation, client, headers))
reload_continuation = self._parser.reload_continuation(
self._parser.get_contents(livechat_json))
if reload_continuation:
livechat_json = (await self._get_livechat_json(
reload_continuation, session, headers))
reload_continuation, client, headers))
contents = self._parser.get_contents(livechat_json)
self._is_replay = True
self._first_fetch = False
return contents
async def _get_livechat_json(self, continuation, session, headers):
async def _get_livechat_json(self, continuation, client, headers):
'''
Get json which includes chat data.
'''
@@ -249,12 +249,11 @@ class LiveChatAsync:
status_code = 0
url = f"https://www.youtube.com/{self._fetch_url}{continuation}&pbj=1"
for _ in range(MAX_RETRY + 1):
async with session.get(url, headers=headers) as resp:
try:
text = await resp.text()
livechat_json = json.loads(text)
resp = await client.get(url, headers=headers)
livechat_json = resp.json()
break
except (ClientConnectorError, json.JSONDecodeError):
except (httpx.HTTPError, json.JSONDecodeError):
await asyncio.sleep(1)
continue
else:

View File

@@ -1,4 +1,4 @@
import requests
import httpx
import json
import signal
import time
@@ -153,10 +153,10 @@ class LiveChat:
parameter for next chat data
'''
try:
with requests.Session() as session:
with httpx.Client(http2=True) as client:
while(continuation and self._is_alive):
continuation = self._check_pause(continuation)
contents = self._get_contents(continuation, session, headers)
contents = self._get_contents(continuation, client, headers)
metadata, chatdata = self._parser.parse(contents)
timeout = metadata['timeoutMs'] / 1000
chat_component = {
@@ -199,7 +199,7 @@ class LiveChat:
continuation = liveparam.getparam(self._video_id, 3)
return continuation
def _get_contents(self, continuation, session, headers):
def _get_contents(self, continuation, client, headers):
'''Get 'continuationContents' from livechat json.
If contents is None at first fetching,
try to fetch archive chat data.
@@ -209,7 +209,7 @@ class LiveChat:
'continuationContents' which includes metadata & chat data.
'''
livechat_json = (
self._get_livechat_json(continuation, session, headers)
self._get_livechat_json(continuation, client, headers)
)
contents = self._parser.get_contents(livechat_json)
if self._first_fetch:
@@ -219,18 +219,18 @@ class LiveChat:
self._fetch_url = "live_chat_replay/get_live_chat_replay?continuation="
continuation = arcparam.getparam(
self._video_id, self.seektime, self._topchat_only)
livechat_json = (self._get_livechat_json(continuation, session, headers))
livechat_json = (self._get_livechat_json(continuation, client, headers))
reload_continuation = self._parser.reload_continuation(
self._parser.get_contents(livechat_json))
if reload_continuation:
livechat_json = (self._get_livechat_json(
reload_continuation, session, headers))
reload_continuation, client, headers))
contents = self._parser.get_contents(livechat_json)
self._is_replay = True
self._first_fetch = False
return contents
def _get_livechat_json(self, continuation, session, headers):
def _get_livechat_json(self, continuation, client, headers):
'''
Get json which includes chat data.
'''
@@ -239,10 +239,9 @@ class LiveChat:
status_code = 0
url = f"https://www.youtube.com/{self._fetch_url}{continuation}&pbj=1"
for _ in range(MAX_RETRY + 1):
with session.get(url, headers=headers) as resp:
with client:
try:
text = resp.text
livechat_json = json.loads(text)
livechat_json = client.get(url, headers=headers).json()
break
except json.JSONDecodeError:
time.sleep(1)

View File

@@ -1,6 +1,6 @@
import os
import re
import requests
import httpx
from base64 import standard_b64encode
from .chat_processor import ChatProcessor
from .default.processor import DefaultProcessor
@@ -108,7 +108,7 @@ class HTMLArchiver(ChatProcessor):
for item in message_items)
def _encode_img(self, url):
    """Download the image at *url* and return its body base64-encoded as str."""
    image_response = httpx.get(url)
    return standard_b64encode(image_response.content).decode()
def _set_emoji_table(self, item: dict):

View File

@@ -1,6 +1,5 @@
import aiohttp
import httpx
import asyncio
import json
from . import parser
from . block import Block
from . worker import ExtractWorker
@@ -55,7 +54,7 @@ def ready_blocks(video_id, duration, div, callback):
raise ValueError
async def _get_blocks(video_id, duration, div, callback):
    """Concurrently create `div` chat blocks over one shared HTTP/2 client."""
    async with httpx.AsyncClient(http2=True) as client:
        block_tasks = [
            _create_block(client, video_id, seektime, callback)
            for seektime in _split(-1, duration, div)
        ]
        return await asyncio.gather(*block_tasks)
@@ -65,9 +64,8 @@ def ready_blocks(video_id, duration, div, callback):
url = f"{REPLAY_URL}{quote(continuation)}&pbj=1"
for _ in range(MAX_RETRY_COUNT):
try:
async with session.get(url, headers=headers) as resp:
text = await resp.text()
next_continuation, actions = parser.parse(json.loads(text))
resp = await session.get(url, headers=headers)
next_continuation, actions = parser.parse(resp.json())
break
except JSONDecodeError:
await asyncio.sleep(3)
@@ -106,7 +104,7 @@ def fetch_patch(callback, blocks, video_id):
)
for block in blocks
]
async with aiohttp.ClientSession() as session:
async with httpx.AsyncClient() as session:
tasks = [worker.run(session) for worker in workers]
return await asyncio.gather(*tasks)
@@ -114,9 +112,8 @@ def fetch_patch(callback, blocks, video_id):
url = f"{REPLAY_URL}{quote(continuation)}&pbj=1"
for _ in range(MAX_RETRY_COUNT):
try:
async with session.get(url, headers=config.headers) as resp:
chat_json = await resp.text()
continuation, actions = parser.parse(json.loads(chat_json))
resp = await session.get(url, headers=config.headers)
continuation, actions = parser.parse(resp.json())
break
except JSONDecodeError:
await asyncio.sleep(3)

View File

@@ -1,6 +1,7 @@
from . block import Block
from . patch import fill, split
from ... paramgen import arcparam
from typing import Tuple
class ExtractWorker:
@@ -76,7 +77,7 @@ def _search_new_block(worker) -> Block:
return new_block
def _get_undone_block(blocks) -> (int, Block):
def _get_undone_block(blocks) -> Tuple[int, Block]:
min_interval_ms = 120000
max_remaining = 0
undone_block = None

View File

@@ -1,5 +1,5 @@
import aiohttp
import httpx
import asyncio
import json
from . import parser
@@ -14,6 +14,8 @@ from urllib.parse import quote
headers = config.headers
REPLAY_URL = "https://www.youtube.com/live_chat_replay?continuation="
INTERVAL = 1
def _split(start, end, count, min_interval_sec=120):
"""
Split section from `start` to `end` into `count` pieces,
@@ -37,7 +39,8 @@ def _split(start, end, count, min_interval_sec = 120):
raise ValueError("count must be equal to or greater than 1.")
if (end - start) / count < min_interval_sec:
count = int((end - start) / min_interval_sec)
if count == 0 : count = 1
if count == 0:
count = 1
interval = (end - start) / count
if count == 1:
@@ -45,17 +48,17 @@ def _split(start, end, count, min_interval_sec = 120):
return sorted(list(set([int(start + interval * j)
for j in range(count)])))
def ready_blocks(video_id, duration, div, callback):
if div <= 0: raise ValueError
if div <= 0:
raise ValueError
async def _get_blocks(video_id, duration, div, callback):
    """Concurrently create `div` chat blocks for *video_id*.

    Bug fix: httpx has no ``ClientSession`` (that is aiohttp's API), so
    ``httpx.ClientSession()`` raises AttributeError at runtime.  Use
    ``httpx.AsyncClient(http2=True)`` instead, matching the extract
    tool's implementation of this same function.
    """
    async with httpx.AsyncClient(http2=True) as session:
        tasks = [_create_block(session, video_id, seektime, callback)
                 for seektime in _split(0, duration, div)]
        return await asyncio.gather(*tasks)
async def _create_block(session, video_id, seektime, callback):
continuation = arcparam.getparam(video_id, seektime=seektime)
url = (f"{REPLAY_URL}{quote(continuation)}&playerOffsetMs="
@@ -84,6 +87,7 @@ def ready_blocks(video_id, duration, div, callback):
_get_blocks(video_id, duration, div, callback))
return blocks
def fetch_patch(callback, blocks, video_id):
async def _allocate_workers():
@@ -94,7 +98,7 @@ def fetch_patch(callback, blocks, video_id):
)
for block in blocks
]
async with aiohttp.ClientSession() as session:
async with httpx.ClientSession() as session:
tasks = [worker.run(session) for worker in workers]
return await asyncio.gather(*tasks)
@@ -124,6 +128,7 @@ def fetch_patch(callback, blocks, video_id):
except CancelledError:
pass
async def _shutdown():
print("\nshutdown...")
tasks = [t for t in asyncio.all_tasks()
@@ -135,7 +140,7 @@ async def _shutdown():
except asyncio.CancelledError:
pass
def cancel():
loop = asyncio.get_event_loop()
loop.create_task(_shutdown())

View File

@@ -1,6 +1,6 @@
import json
import re
import requests
import httpx
from .. import config
from ..exceptions import InvalidVideoIdException
from ..util.extract_video_id import extract_video_id
@@ -85,7 +85,7 @@ class VideoInfo:
def _get_page_text(self, video_id):
    """Fetch the embed-page HTML for *video_id*; raise on HTTP error status."""
    embed_url = f"https://www.youtube.com/embed/{video_id}"
    response = httpx.get(embed_url, headers=headers)
    response.raise_for_status()
    return response.text

View File

@@ -1,11 +1,11 @@
import requests
import httpx
import json
import datetime
from .. import config
def extract(url):
_session = requests.Session()
_session = httpx.Client(http2=True)
html = _session.get(url, headers=config.headers)
with open(str(datetime.datetime.now().strftime('%Y-%m-%d %H-%M-%S')
) + 'test.json', mode='w', encoding='utf-8') as f:

View File

@@ -1,5 +1,4 @@
aiohttp
protobuf
httpx==0.14.1
protobuf==3.13.0
pytz
requests
urllib3

View File

@@ -1,5 +1,4 @@
aioresponses
mock
mocker
pytest
pytest-mock
pytest_httpx

View File

@@ -1,5 +1,5 @@
import json
import requests
import httpx
import pytchat.config as config
from pytchat.paramgen import arcparam
from pytchat.parser.live import Parser
@@ -18,7 +18,7 @@ def test_arcparam_1(mocker):
def test_arcparam_2(mocker):
param = arcparam.getparam("SsjCnHOk-Sk", seektime=100)
url = f"https://www.youtube.com/live_chat_replay/get_live_chat_replay?continuation={param}&pbj=1"
resp = requests.Session().get(url, headers=config.headers)
resp = httpx.Client(http2=True).get(url, headers=config.headers)
jsn = json.loads(resp.text)
parser = Parser(is_replay=True)
contents = parser.get_contents(jsn)
@@ -26,6 +26,7 @@ def test_arcparam_2(mocker):
test_id = chatdata[0]["addChatItemAction"]["item"]["liveChatTextMessageRenderer"]["id"]
assert test_id == "CjoKGkNMYXBzZTdudHVVQ0Zjc0IxZ0FkTnFnQjVREhxDSnlBNHV2bnR1VUNGV0dnd2dvZDd3NE5aZy0w"
def test_arcparam_3(mocker):
param = arcparam.getparam("01234567890")
assert param == "op2w0wQmGhxDZzhLRFFvTE1ERXlNelExTmpjNE9UQWdBUT09SARgAXICCAE%3D"

View File

@@ -1,6 +1,6 @@
from pytchat.tool.mining import parser
import pytchat.config as config
import requests
import httpx
import json
from pytchat.paramgen import arcparam_mining as arcparam
@@ -28,7 +28,7 @@ def test_arcparam_1(mocker):
def test_arcparam_2(mocker):
param = arcparam.getparam("PZz9NB0-Z64", 1)
url = f"https://www.youtube.com/live_chat_replay?continuation={param}&playerOffsetMs=1000&pbj=1"
resp = requests.Session().get(url, headers=config.headers)
resp = httpx.Client(http2=True).get(url, headers=config.headers)
jsn = json.loads(resp.text)
_, chatdata = parser.parse(jsn[1])
test_id = chatdata[0]["addChatItemAction"]["item"]["liveChatPaidMessageRenderer"]["id"]

View File

@@ -1,77 +0,0 @@
import aiohttp
import asyncio
import json
from pytchat.tool.extract import parser
import sys
import time
from aioresponses import aioresponses
from concurrent.futures import CancelledError
from pytchat.tool.extract import asyncdl
def _open_file(path):
    """Read the file at *path* as UTF-8 text and return its entire content."""
    with open(path, mode='r', encoding='utf-8') as file_obj:
        return file_obj.read()
def test_asyncdl_split():
    """_split(): value partitioning cases, then invalid-argument cases."""
    # valid partitions
    assert asyncdl._split(0, 1000, 1) == [0]
    assert asyncdl._split(1000, 1000, 10) == [1000]
    assert asyncdl._split(0, 1000, 5) == [0, 200, 400, 600, 800]
    assert asyncdl._split(10.5, 700.3, 5) == [10, 148, 286, 424, 562]
    assert asyncdl._split(0, 500, 5) == [0, 125, 250, 375]
    assert asyncdl._split(0, 500, 500) == [0, 125, 250, 375]
    assert asyncdl._split(-1, 1000, 5) == [-1, 199, 399, 599, 799]

    # each of these must raise ValueError
    invalid_arguments = [
        (500, 0, 5),        # invalid argument order (start > end)
        (0, 500, -1),       # invalid count: negative
        (0, 500, 0),        # invalid count: zero
        (0, 5000, 5.2),     # invalid type: non-integer count
        (0, 5000, "test"),  # invalid type: string count
        ([0, 1], 5000, 5),  # invalid type: list start
    ]
    for args in invalid_arguments:
        try:
            asyncdl._split(*args)
            assert False
        except ValueError:
            assert True

View File

@@ -1,18 +1,15 @@
import aiohttp
import asyncio
import json
import os, sys
import time
from pytchat.tool.extract import duplcheck
from pytchat.tool.extract import parser
from pytchat.tool.extract.block import Block
from pytchat.tool.extract.duplcheck import _dump
def _open_file(path):
    """Return the UTF-8 decoded content of the file at *path*."""
    with open(path, mode='r', encoding='utf-8') as fp:
        return fp.read()
def test_overlap():
"""
test overlap data
@@ -23,16 +20,23 @@ def test_overlap():
def load_chatdata(filename):
return parser.parse(
json.loads(_open_file("tests/testdata/extract_duplcheck/overlap/"+filename))
json.loads(_open_file(
"tests/testdata/extract_duplcheck/overlap/" + filename))
)[1]
blocks = (
Block(first = 0, last= 12771, end= 9890,chat_data = load_chatdata("dp0-0.json")),
Block(first = 9890, last= 15800, end= 20244,chat_data = load_chatdata("dp0-1.json")),
Block(first = 20244,last= 45146, end= 32476,chat_data = load_chatdata("dp0-2.json")),
Block(first = 32476,last= 50520, end= 41380,chat_data = load_chatdata("dp0-3.json")),
Block(first = 41380,last= 62875, end= 52568,chat_data = load_chatdata("dp0-4.json")),
Block(first = 52568,last= 62875, end= 54000,chat_data = load_chatdata("dp0-5.json"),is_last=True)
Block(first=0, last=12771, end=9890,
chat_data=load_chatdata("dp0-0.json")),
Block(first=9890, last=15800, end=20244,
chat_data=load_chatdata("dp0-1.json")),
Block(first=20244, last=45146, end=32476,
chat_data=load_chatdata("dp0-2.json")),
Block(first=32476, last=50520, end=41380,
chat_data=load_chatdata("dp0-3.json")),
Block(first=41380, last=62875, end=52568,
chat_data=load_chatdata("dp0-4.json")),
Block(first=52568, last=62875, end=54000,
chat_data=load_chatdata("dp0-5.json"), is_last=True)
)
result = duplcheck.remove_overlap(blocks)
# dp0-0.json has item offset time is 9890 (equals block[0].end = block[1].first),
@@ -50,11 +54,13 @@ def test_overlap():
# the last block must be always added to result.
assert result[5].last == 62875
def test_duplicate_head():
def load_chatdata(filename):
return parser.parse(
json.loads(_open_file("tests/testdata/extract_duplcheck/head/"+filename))
json.loads(_open_file(
"tests/testdata/extract_duplcheck/head/" + filename))
)[1]
"""
@@ -89,6 +95,7 @@ def test_duplicate_head():
assert result[2].first == blocks[5].first
assert result[2].last == blocks[5].last
def test_duplicate_tail():
"""
test duplicate tail data
@@ -103,7 +110,8 @@ def test_duplicate_tail():
"""
def load_chatdata(filename):
return parser.parse(
json.loads(_open_file("tests/testdata/extract_duplcheck/head/"+filename))
json.loads(_open_file(
"tests/testdata/extract_duplcheck/head/" + filename))
)[1]
# chat data offsets are ignored.
blocks = (
@@ -124,5 +132,3 @@ def test_duplicate_tail():
assert result[1].last == blocks[2].last
assert result[2].first == blocks[4].first
assert result[2].last == blocks[4].last

View File

@@ -1,19 +1,15 @@
import aiohttp
import asyncio
import json
import os, sys
import time
from aioresponses import aioresponses
from pytchat.tool.extract import duplcheck
from pytchat.tool.extract import parser
from pytchat.tool.extract.block import Block
from pytchat.tool.extract.patch import Patch, fill, split, set_patch
from pytchat.tool.extract.duplcheck import _dump
from pytchat.tool.extract.patch import Patch, split
def _open_file(path):
    """Load the whole file at *path* and return it as a UTF-8 str."""
    with open(path, mode='r', encoding='utf-8') as handle:
        return handle.read()
def load_chatdata(filename):
return parser.parse(
json.loads(_open_file("tests/testdata/fetch_patch/" + filename))
@@ -61,8 +57,10 @@ def test_split_0():
@fetched patch
|-- patch --|
"""
parent = Block(first=0, last=4000, end=60000, continuation='parent', during_split=True)
child = Block(first=0, last=0, end=60000, continuation='mean', during_split=True)
parent = Block(first=0, last=4000, end=60000,
continuation='parent', during_split=True)
child = Block(first=0, last=0, end=60000,
continuation='mean', during_split=True)
patch = Patch(chats=load_chatdata('pt0-5.json'),
first=32500, last=34000, continuation='patch')
@@ -73,8 +71,9 @@ def test_split_0():
assert parent.end == child.first
assert child.first < child.last
assert child.last < child.end
assert parent.during_split == False
assert child.during_split == False
assert parent.during_split is False
assert child.during_split is False
def test_split_1():
"""patch.first <= parent_block.last
@@ -125,8 +124,9 @@ def test_split_1():
assert parent.last == 33000 # no change
assert parent.end == 60000 # no change
assert child.continuation is None
assert parent.during_split == False
assert child.during_split == True #exclude during_split sequence
assert parent.during_split is False
assert child.during_split is True # exclude during_split sequence
def test_split_2():
"""child_block.end < patch.last:
@@ -182,8 +182,9 @@ def test_split_2():
assert child.first < child.last
assert child.last < child.end
assert child.continuation is None
assert parent.during_split == False
assert child.during_split == False
assert parent.during_split is False
assert child.during_split is False
def test_split_none():
"""patch.last <= parent_block.last
@@ -234,5 +235,5 @@ def test_split_none():
assert parent.last == 40000 # no change
assert parent.end == 60000 # no change
assert child.continuation is None
assert parent.during_split == False
assert child.during_split == True #exclude during_split sequence
assert parent.during_split is False
assert child.during_split is True # exclude during_split sequence

View File

@@ -1,5 +1,8 @@
import asyncio
import json
from aioresponses import aioresponses
from pytest_httpx import HTTPXMock
from concurrent.futures import CancelledError
from pytchat.core_multithread.livechat import LiveChat
from pytchat.core_async.livechat import LiveChatAsync
from pytchat.exceptions import ResponseContextError
@@ -9,34 +12,37 @@ def _open_file(path):
return f.read()
@aioresponses()
def test_Async(*mock):
vid = '__test_id__'
_text = _open_file('tests/testdata/paramgen_firstread.json')
_text = json.loads(_text)
mock[0].get(
f"https://www.youtube.com/live_chat?v={vid}&is_popout=1", status=200, body=_text)
def add_response_file(httpx_mock: HTTPXMock, jsonfile_path: str):
    """Queue the JSON stored at *jsonfile_path* as the next mocked response."""
    payload = json.loads(_open_file(jsonfile_path))
    httpx_mock.add_response(json=payload)
def test_async(httpx_mock: HTTPXMock):
add_response_file(httpx_mock, 'tests/testdata/paramgen_firstread.json')
async def test_loop():
try:
chat = LiveChatAsync(video_id='__test_id__')
_ = await chat.get()
assert chat.is_alive()
chat.terminate()
assert not chat.is_alive()
except ResponseContextError:
assert not chat.is_alive()
def test_MultiThread(mocker):
_text = _open_file('tests/testdata/paramgen_firstread.json')
_text = json.loads(_text)
responseMock = mocker.Mock()
responseMock.status_code = 200
responseMock.text = _text
mocker.patch('requests.Session.get').return_value = responseMock
assert False
loop = asyncio.get_event_loop()
try:
chat = LiveChatAsync(video_id='__test_id__')
loop.run_until_complete(test_loop())
except CancelledError:
assert True
def test_multithread(httpx_mock: HTTPXMock):
add_response_file(httpx_mock, 'tests/testdata/paramgen_firstread.json')
try:
chat = LiveChat(video_id='__test_id__')
_ = chat.get()
assert chat.is_alive()
chat.terminate()
assert not chat.is_alive()
except ResponseContextError:
chat.terminate()
assert not chat.is_alive()
assert False

View File

@@ -1,6 +1,6 @@
import asyncio
import re
from aioresponses import aioresponses
import json
from pytest_httpx import HTTPXMock
from concurrent.futures import CancelledError
from pytchat.core_multithread.livechat import LiveChat
from pytchat.core_async.livechat import LiveChatAsync
@@ -12,18 +12,18 @@ def _open_file(path):
return f.read()
@aioresponses()
def test_async_live_stream(*mock):
def add_response_file(httpx_mock: HTTPXMock, jsonfile_path: str):
    """Register the JSON file at *jsonfile_path* as a mocked httpx response."""
    response_body = json.loads(_open_file(jsonfile_path))
    httpx_mock.add_response(json=response_body)
async def test_loop(*mock):
pattern = re.compile(
r'^https://www.youtube.com/live_chat/get_live_chat\?continuation=.*$')
_text = _open_file('tests/testdata/test_stream.json')
mock[0].get(pattern, status=200, body=_text)
def test_async_live_stream(httpx_mock: HTTPXMock):
add_response_file(httpx_mock, 'tests/testdata/test_stream.json')
async def test_loop():
chat = LiveChatAsync(video_id='__test_id__', processor=DummyProcessor())
chats = await chat.get()
rawdata = chats[0]["chatdata"]
# assert fetching livachat data
assert list(rawdata[0]["addChatItemAction"]["item"].keys())[
0] == "liveChatTextMessageRenderer"
assert list(rawdata[1]["addChatItemAction"]["item"].keys())[
@@ -41,25 +41,16 @@ def test_async_live_stream(*mock):
loop = asyncio.get_event_loop()
try:
loop.run_until_complete(test_loop(*mock))
loop.run_until_complete(test_loop())
except CancelledError:
assert True
@aioresponses()
def test_async_replay_stream(*mock):
async def test_loop(*mock):
pattern_live = re.compile(
r'^https://www.youtube.com/live_chat/get_live_chat\?continuation=.*$')
pattern_replay = re.compile(
r'^https://www.youtube.com/live_chat_replay/get_live_chat_replay\?continuation=.*$')
# empty livechat -> switch to fetch replaychat
_text_live = _open_file('tests/testdata/finished_live.json')
_text_replay = _open_file('tests/testdata/chatreplay.json')
mock[0].get(pattern_live, status=200, body=_text_live)
mock[0].get(pattern_replay, status=200, body=_text_replay)
def test_async_replay_stream(httpx_mock: HTTPXMock):
add_response_file(httpx_mock, 'tests/testdata/finished_live.json')
add_response_file(httpx_mock, 'tests/testdata/chatreplay.json')
async def test_loop():
chat = LiveChatAsync(video_id='__test_id__', processor=DummyProcessor())
chats = await chat.get()
rawdata = chats[0]["chatdata"]
@@ -71,27 +62,16 @@ def test_async_replay_stream(*mock):
loop = asyncio.get_event_loop()
try:
loop.run_until_complete(test_loop(*mock))
loop.run_until_complete(test_loop())
except CancelledError:
assert True
@aioresponses()
def test_async_force_replay(*mock):
def test_async_force_replay(httpx_mock: HTTPXMock):
add_response_file(httpx_mock, 'tests/testdata/test_stream.json')
add_response_file(httpx_mock, 'tests/testdata/chatreplay.json')
async def test_loop(*mock):
pattern_live = re.compile(
r'^https://www.youtube.com/live_chat/get_live_chat\?continuation=.*$')
pattern_replay = re.compile(
r'^https://www.youtube.com/live_chat_replay/get_live_chat_replay\?continuation=.*$')
# valid live data, but force_replay = True
_text_live = _open_file('tests/testdata/test_stream.json')
# valid replay data
_text_replay = _open_file('tests/testdata/chatreplay.json')
mock[0].get(pattern_live, status=200, body=_text_live)
mock[0].get(pattern_replay, status=200, body=_text_replay)
# force replay
async def test_loop():
chat = LiveChatAsync(
video_id='__test_id__', processor=DummyProcessor(), force_replay=True)
chats = await chat.get()
@@ -105,20 +85,13 @@ def test_async_force_replay(*mock):
loop = asyncio.get_event_loop()
try:
loop.run_until_complete(test_loop(*mock))
loop.run_until_complete(test_loop())
except CancelledError:
assert True
def test_multithread_live_stream(mocker):
_text = _open_file('tests/testdata/test_stream.json')
responseMock = mocker.Mock()
responseMock.status_code = 200
responseMock.text = _text
mocker.patch(
'requests.Session.get').return_value.__enter__.return_value = responseMock
def test_multithread_live_stream(httpx_mock: HTTPXMock):
add_response_file(httpx_mock, 'tests/testdata/test_stream.json')
chat = LiveChat(video_id='__test_id__', processor=DummyProcessor())
chats = chat.get()
rawdata = chats[0]["chatdata"]

View File

@@ -1,21 +1,18 @@
from pytchat.parser.live import Parser
import json
from aioresponses import aioresponses
from pytchat.exceptions import NoContents
parser = Parser(is_replay=False)
def _open_file(path):
    """Read and return the complete UTF-8 text of the file at *path*."""
    with open(path, mode='r', encoding='utf-8') as source:
        return source.read()
parser = Parser(is_replay=False)
@aioresponses()
def test_finishedlive(*mock):
'''配信が終了した動画を正しく処理できるか'''
_text = _open_file('tests/testdata/finished_live.json')
_text = json.loads(_text)
@@ -26,10 +23,8 @@ def test_finishedlive(*mock):
assert True
@aioresponses()
def test_parsejson(*mock):
'''jsonを正常にパースできるか'''
_text = _open_file('tests/testdata/paramgen_firstread.json')
_text = json.loads(_text)

View File

@@ -12,7 +12,7 @@ def _set_test_data(filepath, mocker):
response_mock = mocker.Mock()
response_mock.status_code = 200
response_mock.text = _text
mocker.patch('requests.get').return_value = response_mock
mocker.patch('httpx.get').return_value = response_mock
def test_archived_page(mocker):