Use httpx
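This commit swaps the requests / aiohttp / aioresponses test stack for httpx and pytest-httpx. A minimal sketch of the synchronous change the hunks below repeat; the URL and headers here are placeholders, not values from this repository:

    import httpx
    import requests

    url = "https://www.youtube.com/live_chat_replay/get_live_chat_replay?continuation=<param>&pbj=1"  # placeholder
    headers = {"user-agent": "Mozilla/5.0"}  # placeholder for pytchat's config.headers

    # before: a throwaway requests session per call
    resp = requests.Session().get(url, headers=headers)

    # after: an httpx client, optionally with HTTP/2 (needs the httpx[http2] extra installed)
    resp = httpx.Client(http2=True).get(url, headers=headers)
    data = resp.text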
@@ -1,5 +1,5 @@
import json
-import requests
+import httpx
import pytchat.config as config
from pytchat.paramgen import arcparam
from pytchat.parser.live import Parser
@@ -18,14 +18,15 @@ def test_arcparam_1(mocker):
def test_arcparam_2(mocker):
    param = arcparam.getparam("SsjCnHOk-Sk", seektime=100)
    url = f"https://www.youtube.com/live_chat_replay/get_live_chat_replay?continuation={param}&pbj=1"
-    resp = requests.Session().get(url, headers=config.headers)
+    resp = httpx.Client(http2=True).get(url, headers=config.headers)
    jsn = json.loads(resp.text)
    parser = Parser(is_replay=True)
    contents = parser.get_contents(jsn)
-    _ , chatdata = parser.parse(contents)
+    _, chatdata = parser.parse(contents)
    test_id = chatdata[0]["addChatItemAction"]["item"]["liveChatTextMessageRenderer"]["id"]
    assert test_id == "CjoKGkNMYXBzZTdudHVVQ0Zjc0IxZ0FkTnFnQjVREhxDSnlBNHV2bnR1VUNGV0dnd2dvZDd3NE5aZy0w"


def test_arcparam_3(mocker):
    param = arcparam.getparam("01234567890")
    assert param == "op2w0wQmGhxDZzhLRFFvTE1ERXlNelExTmpjNE9UQWdBUT09SARgAXICCAE%3D"
@@ -1,6 +1,6 @@
from pytchat.tool.mining import parser
import pytchat.config as config
-import requests
+import httpx
import json
from pytchat.paramgen import arcparam_mining as arcparam
@@ -28,7 +28,7 @@ def test_arcparam_1(mocker):
def test_arcparam_2(mocker):
    param = arcparam.getparam("PZz9NB0-Z64", 1)
    url = f"https://www.youtube.com/live_chat_replay?continuation={param}&playerOffsetMs=1000&pbj=1"
-    resp = requests.Session().get(url, headers=config.headers)
+    resp = httpx.Client(http2=True).get(url, headers=config.headers)
    jsn = json.loads(resp.text)
    _, chatdata = parser.parse(jsn[1])
    test_id = chatdata[0]["addChatItemAction"]["item"]["liveChatPaidMessageRenderer"]["id"]
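The tests above create a throwaway httpx.Client per request and never close it, which mirrors the old one-line requests.Session() usage. If the pattern is reused elsewhere, a context manager closes the connection pool deterministically; a small sketch under the same placeholder URL/header assumptions, not code from this commit:

    import httpx

    url = "https://www.youtube.com/live_chat_replay?continuation=<param>&pbj=1"  # placeholder
    headers = {"user-agent": "Mozilla/5.0"}  # placeholder for config.headers

    with httpx.Client(http2=True) as client:
        resp = client.get(url, headers=headers)
        jsn = resp.json()  # equivalent to json.loads(resp.text)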
@@ -1,77 +0,0 @@
-import aiohttp
-import asyncio
-import json
-from pytchat.tool.extract import parser
-import sys
-import time
-from aioresponses import aioresponses
-from concurrent.futures import CancelledError
-from pytchat.tool.extract import asyncdl
-
-
-def _open_file(path):
-    with open(path,mode ='r',encoding = 'utf-8') as f:
-        return f.read()
-
-
-def test_asyncdl_split():
-
-    ret = asyncdl._split(0,1000,1)
-    assert ret == [0]
-
-    ret = asyncdl._split(1000,1000,10)
-    assert ret == [1000]
-
-    ret = asyncdl._split(0,1000,5)
-    assert ret == [0,200,400,600,800]
-
-    ret = asyncdl._split(10.5, 700.3, 5)
-    assert ret == [10, 148, 286, 424, 562]
-
-    ret = asyncdl._split(0,500,5)
-    assert ret == [0,125,250,375]
-
-    ret = asyncdl._split(0,500,500)
-    assert ret == [0,125,250,375]
-
-    ret = asyncdl._split(-1,1000,5)
-    assert ret == [-1, 199, 399, 599, 799]
-
-    """invalid argument order"""
-    try:
-        ret = asyncdl._split(500,0,5)
-        assert False
-    except ValueError:
-        assert True
-
-    """invalid count"""
-    try:
-        ret = asyncdl._split(0,500,-1)
-        assert False
-    except ValueError:
-        assert True
-
-    try:
-        ret = asyncdl._split(0,500,0)
-        assert False
-    except ValueError:
-        assert True
-
-    """invalid argument type"""
-    try:
-        ret = asyncdl._split(0,5000,5.2)
-        assert False
-    except ValueError:
-        assert True
-
-    try:
-        ret = asyncdl._split(0,5000,"test")
-        assert False
-    except ValueError:
-        assert True
-
-    try:
-        ret = asyncdl._split([0,1],5000,5)
-        assert False
-    except ValueError:
-        assert True
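The deleted file above pinned down the behaviour of asyncdl._split purely through its assertions. For readers without the source at hand, a rough re-implementation that satisfies those assertions looks like the sketch below; the real function lives in pytchat.tool.extract.asyncdl, and the minimum block length used here (120) is an assumption chosen only to reproduce the expected values, not a constant taken from pytchat:

    def _split(start, end, count, min_interval=120):
        # Start offsets for cutting [start, end) into at most `count` blocks.
        if not isinstance(count, int):
            raise ValueError("count must be an integer")
        if not all(isinstance(v, (int, float)) for v in (start, end)):
            raise ValueError("start and end must be numbers")
        if count < 1 or start > end:
            raise ValueError("invalid range or count")
        if (end - start) / count < min_interval:
            # blocks would be too short: shrink the block count instead
            count = int((end - start) / min_interval)
        if count <= 1:
            return [int(start)]
        step = (end - start) / count
        return [int(start + step * i) for i in range(count)]

    assert _split(0, 1000, 5) == [0, 200, 400, 600, 800]
    assert _split(0, 500, 5) == [0, 125, 250, 375]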
@@ -1,60 +1,66 @@
import aiohttp
import asyncio
import json
import os, sys
import time
from pytchat.tool.extract import duplcheck
from pytchat.tool.extract import parser
from pytchat.tool.extract.block import Block
from pytchat.tool.extract.duplcheck import _dump


def _open_file(path):
-    with open(path,mode ='r',encoding = 'utf-8') as f:
+    with open(path, mode='r', encoding='utf-8') as f:
        return f.read()


def test_overlap():
    """
    test overlap data
    operation : [0] [2] [3] [4] -> last :align to end
                [1] , [5]      -> no change
    """

    def load_chatdata(filename):
        return parser.parse(
-            json.loads(_open_file("tests/testdata/extract_duplcheck/overlap/"+filename))
+            json.loads(_open_file(
+                "tests/testdata/extract_duplcheck/overlap/" + filename))
        )[1]

    blocks = (
-        Block(first = 0, last= 12771, end= 9890,chat_data = load_chatdata("dp0-0.json")),
-        Block(first = 9890, last= 15800, end= 20244,chat_data = load_chatdata("dp0-1.json")),
-        Block(first = 20244,last= 45146, end= 32476,chat_data = load_chatdata("dp0-2.json")),
-        Block(first = 32476,last= 50520, end= 41380,chat_data = load_chatdata("dp0-3.json")),
-        Block(first = 41380,last= 62875, end= 52568,chat_data = load_chatdata("dp0-4.json")),
-        Block(first = 52568,last= 62875, end= 54000,chat_data = load_chatdata("dp0-5.json"),is_last=True)
+        Block(first=0, last=12771, end=9890,
+              chat_data=load_chatdata("dp0-0.json")),
+        Block(first=9890, last=15800, end=20244,
+              chat_data=load_chatdata("dp0-1.json")),
+        Block(first=20244, last=45146, end=32476,
+              chat_data=load_chatdata("dp0-2.json")),
+        Block(first=32476, last=50520, end=41380,
+              chat_data=load_chatdata("dp0-3.json")),
+        Block(first=41380, last=62875, end=52568,
+              chat_data=load_chatdata("dp0-4.json")),
+        Block(first=52568, last=62875, end=54000,
+              chat_data=load_chatdata("dp0-5.json"), is_last=True)
    )
    result = duplcheck.remove_overlap(blocks)

-    #dp0-0.json has item offset time is 9890 (equals block[0].end = block[1].first),
-    #but must be aligne to the most close and smaller value:9779.
+    # dp0-0.json has item offset time is 9890 (equals block[0].end = block[1].first),
+    # but must be aligne to the most close and smaller value:9779.
    assert result[0].last == 9779

    assert result[1].last == 15800

    assert result[2].last == 32196

    assert result[3].last == 41116

    assert result[4].last == 52384

-    #the last block must be always added to result.
+    # the last block must be always added to result.
    assert result[5].last == 62875


def test_duplicate_head():

    def load_chatdata(filename):
        return parser.parse(
-            json.loads(_open_file("tests/testdata/extract_duplcheck/head/"+filename))
+            json.loads(_open_file(
+                "tests/testdata/extract_duplcheck/head/" + filename))
        )[1]

    """
@@ -69,25 +75,26 @@ def test_duplicate_head():
    result : [2] , [4] , [5]
    """

-    #chat data offsets are ignored.
+    # chat data offsets are ignored.
    blocks = (
-        Block(first = 0, last = 2500, chat_data = load_chatdata("dp0-0.json")),
-        Block(first = 0, last =38771, chat_data = load_chatdata("dp0-1.json")),
-        Block(first = 0, last =45146, chat_data = load_chatdata("dp0-2.json")),
-        Block(first = 20244, last =60520, chat_data = load_chatdata("dp0-3.json")),
-        Block(first = 20244, last =62875, chat_data = load_chatdata("dp0-4.json")),
-        Block(first = 52568, last =62875, chat_data = load_chatdata("dp0-5.json"))
+        Block(first=0, last=2500, chat_data=load_chatdata("dp0-0.json")),
+        Block(first=0, last=38771, chat_data=load_chatdata("dp0-1.json")),
+        Block(first=0, last=45146, chat_data=load_chatdata("dp0-2.json")),
+        Block(first=20244, last=60520, chat_data=load_chatdata("dp0-3.json")),
+        Block(first=20244, last=62875, chat_data=load_chatdata("dp0-4.json")),
+        Block(first=52568, last=62875, chat_data=load_chatdata("dp0-5.json"))
    )
    _dump(blocks)
    result = duplcheck.remove_duplicate_head(blocks)

    assert len(result) == 3
    assert result[0].first == blocks[2].first
    assert result[0].last == blocks[2].last
    assert result[1].first == blocks[4].first
    assert result[1].last == blocks[4].last
    assert result[2].first == blocks[5].first
    assert result[2].last == blocks[5].last


def test_duplicate_tail():
    """
@@ -103,26 +110,25 @@ def test_duplicate_tail():
    """
    def load_chatdata(filename):
        return parser.parse(
-            json.loads(_open_file("tests/testdata/extract_duplcheck/head/"+filename))
+            json.loads(_open_file(
+                "tests/testdata/extract_duplcheck/head/" + filename))
        )[1]

-    #chat data offsets are ignored.
+    # chat data offsets are ignored.
    blocks = (
-        Block(first = 0,last = 2500, chat_data=load_chatdata("dp0-0.json")),
-        Block(first = 1500,last = 2500, chat_data=load_chatdata("dp0-1.json")),
-        Block(first = 10000,last = 45146, chat_data=load_chatdata("dp0-2.json")),
-        Block(first = 20244,last = 45146, chat_data=load_chatdata("dp0-3.json")),
-        Block(first = 20244,last = 62875, chat_data=load_chatdata("dp0-4.json")),
-        Block(first = 52568,last = 62875, chat_data=load_chatdata("dp0-5.json"))
+        Block(first=0, last=2500, chat_data=load_chatdata("dp0-0.json")),
+        Block(first=1500, last=2500, chat_data=load_chatdata("dp0-1.json")),
+        Block(first=10000, last=45146, chat_data=load_chatdata("dp0-2.json")),
+        Block(first=20244, last=45146, chat_data=load_chatdata("dp0-3.json")),
+        Block(first=20244, last=62875, chat_data=load_chatdata("dp0-4.json")),
+        Block(first=52568, last=62875, chat_data=load_chatdata("dp0-5.json"))
    )

    result = duplcheck.remove_duplicate_tail(blocks)
    _dump(result)
    assert len(result) == 3
    assert result[0].first == blocks[0].first
    assert result[0].last == blocks[0].last
    assert result[1].first == blocks[2].first
    assert result[1].last == blocks[2].last
    assert result[2].first == blocks[4].first
    assert result[2].last == blocks[4].last
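The assertions in test_duplicate_head and test_duplicate_tail imply a simple rule: among blocks that share the same first offset (or the same last offset), only the widest block survives. A hedged sketch of that rule, written against a minimal stand-in class rather than pytchat's real Block, which also carries chat_data, end and overlap handling:

    from dataclasses import dataclass

    @dataclass
    class FakeBlock:  # stand-in; not pytchat.tool.extract.block.Block
        first: int
        last: int

    def remove_duplicate_head(blocks):
        # keep, for each distinct `first`, the block that reaches furthest
        keep = {}
        for b in blocks:
            if b.first not in keep or b.last > keep[b.first].last:
                keep[b.first] = b
        return sorted(keep.values(), key=lambda b: b.first)

    def remove_duplicate_tail(blocks):
        # keep, for each distinct `last`, the block that starts earliest
        keep = {}
        for b in blocks:
            if b.last not in keep or b.first < keep[b.last].first:
                keep[b.last] = b
        return sorted(keep.values(), key=lambda b: b.first)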
@@ -1,23 +1,19 @@
-import aiohttp
import asyncio
import json
import os, sys
import time
-from aioresponses import aioresponses
from pytchat.tool.extract import duplcheck

from pytchat.tool.extract import parser
from pytchat.tool.extract.block import Block
-from pytchat.tool.extract.patch import Patch, fill, split, set_patch
from pytchat.tool.extract.duplcheck import _dump
+from pytchat.tool.extract.patch import Patch, split


def _open_file(path):
-    with open(path,mode ='r',encoding = 'utf-8') as f:
+    with open(path, mode='r', encoding='utf-8') as f:
        return f.read()


def load_chatdata(filename):
    return parser.parse(
-        json.loads(_open_file("tests/testdata/fetch_patch/"+filename))
+        json.loads(_open_file("tests/testdata/fetch_patch/" + filename))
    )[1]


def test_split_0():
@@ -61,20 +57,23 @@ def test_split_0():
                 @fetched patch
                 |-- patch --|
    """
-    parent = Block(first=0, last=4000, end=60000, continuation='parent', during_split=True)
-    child = Block(first=0, last=0, end=60000, continuation='mean', during_split=True)
+    parent = Block(first=0, last=4000, end=60000,
+                   continuation='parent', during_split=True)
+    child = Block(first=0, last=0, end=60000,
+                  continuation='mean', during_split=True)
    patch = Patch(chats=load_chatdata('pt0-5.json'),
                  first=32500, last=34000, continuation='patch')

-    split(parent,child,patch)
+    split(parent, child, patch)

    assert child.continuation == 'patch'
    assert parent.last < child.first
    assert parent.end == child.first
    assert child.first < child.last
    assert child.last < child.end
-    assert parent.during_split == False
-    assert child.during_split == False
+    assert parent.during_split is False
+    assert child.during_split is False


def test_split_1():
    """patch.first <= parent_block.last
@@ -119,14 +118,15 @@ def test_split_1():
    child = Block(first=0, last=0, end=60000, continuation='mean', during_split=True)
    patch = Patch(chats=load_chatdata('pt0-5.json'),
                  first=32500, last=34000, continuation='patch')

-    split(parent,child,patch)
+    split(parent, child, patch)

-    assert parent.last == 33000 #no change
-    assert parent.end == 60000 #no change
+    assert parent.last == 33000  # no change
+    assert parent.end == 60000  # no change
    assert child.continuation is None
-    assert parent.during_split == False
-    assert child.during_split == True #exclude during_split sequence
+    assert parent.during_split is False
+    assert child.during_split is True  # exclude during_split sequence


def test_split_2():
    """child_block.end < patch.last:
@@ -174,7 +174,7 @@ def test_split_2():
    patch = Patch(chats=load_chatdata('pt0-5.json'),
                  first=32500, last=34000, continuation='patch')

-    split(parent,child,patch)
+    split(parent, child, patch)

    assert child.continuation is None
    assert parent.last < child.first
@@ -182,8 +182,9 @@ def test_split_2():
    assert child.first < child.last
    assert child.last < child.end
    assert child.continuation is None
-    assert parent.during_split == False
-    assert child.during_split == False
+    assert parent.during_split is False
+    assert child.during_split is False


def test_split_none():
    """patch.last <= parent_block.last
@@ -193,7 +194,7 @@ def test_split_none():
    and parent.block.last exceeds patch.first.

    In this case, fetched patch is all discarded,
    and worker searches other processing block again.

    ~~~~~~ before ~~~~~~
@@ -229,10 +230,10 @@ def test_split_none():
    patch = Patch(chats=load_chatdata('pt0-5.json'),
                  first=32500, last=34000, continuation='patch')

-    split(parent,child,patch)
+    split(parent, child, patch)

-    assert parent.last == 40000 #no change
-    assert parent.end == 60000 #no change
+    assert parent.last == 40000  # no change
+    assert parent.end == 60000  # no change
    assert child.continuation is None
-    assert parent.during_split == False
-    assert child.during_split == True #exclude during_split sequence
+    assert parent.during_split is False
+    assert child.during_split is True  # exclude during_split sequence
@@ -1,5 +1,8 @@
import asyncio
import json
-from aioresponses import aioresponses
+from pytest_httpx import HTTPXMock
from concurrent.futures import CancelledError
from pytchat.core_multithread.livechat import LiveChat
from pytchat.core_async.livechat import LiveChatAsync
from pytchat.exceptions import ResponseContextError
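From here on the live chat tests stop registering aioresponses URL patterns and instead use the httpx_mock fixture from pytest-httpx, which intercepts every request issued through httpx clients, sync or async alike. A minimal, self-contained sketch of how that fixture behaves (illustration only, not code from this commit):

    import httpx
    from pytest_httpx import HTTPXMock

    def test_fixture_sketch(httpx_mock: HTTPXMock):
        # with no URL matcher given, the canned response answers any request
        httpx_mock.add_response(json={"ok": True})

        resp = httpx.get("https://example.com/anything")
        assert resp.json() == {"ok": True}

        # the fixture also records what was requested
        assert len(httpx_mock.get_requests()) == 1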
@@ -9,34 +12,37 @@ def _open_file(path):
        return f.read()


-@aioresponses()
-def test_Async(*mock):
-    vid = '__test_id__'
-    _text = _open_file('tests/testdata/paramgen_firstread.json')
-    _text = json.loads(_text)
-    mock[0].get(
-        f"https://www.youtube.com/live_chat?v={vid}&is_popout=1", status=200, body=_text)
+def add_response_file(httpx_mock: HTTPXMock, jsonfile_path: str):
+    testdata = json.loads(_open_file(jsonfile_path))
+    httpx_mock.add_response(json=testdata)
+
+
+def test_async(httpx_mock: HTTPXMock):
+    add_response_file(httpx_mock, 'tests/testdata/paramgen_firstread.json')

    async def test_loop():
        try:
            chat = LiveChatAsync(video_id='__test_id__')
            _ = await chat.get()
            assert chat.is_alive()
            chat.terminate()
            assert not chat.is_alive()
        except ResponseContextError:
            assert False

    loop = asyncio.get_event_loop()
    try:
        chat = LiveChatAsync(video_id='__test_id__')
        loop.run_until_complete(test_loop())
    except CancelledError:
        assert True


+def test_multithread(httpx_mock: HTTPXMock):
+    add_response_file(httpx_mock, 'tests/testdata/paramgen_firstread.json')
+    try:
+        chat = LiveChat(video_id='__test_id__')
+        _ = chat.get()
+        assert chat.is_alive()
+        chat.terminate()
+        assert not chat.is_alive()
+    except ResponseContextError:
+        assert not chat.is_alive()


-def test_MultiThread(mocker):
-    _text = _open_file('tests/testdata/paramgen_firstread.json')
-    _text = json.loads(_text)
-    responseMock = mocker.Mock()
-    responseMock.status_code = 200
-    responseMock.text = _text
-    mocker.patch('requests.Session.get').return_value = responseMock
-    try:
-        chat = LiveChatAsync(video_id='__test_id__')
-        assert chat.is_alive()
-        chat.terminate()
-        assert not chat.is_alive()
-    except ResponseContextError:
-        chat.terminate()
-        assert not chat.is_alive()
-        assert False
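Both the asyncio-driven and the multithread tests above now share the same httpx_mock fixture, which can only intercept traffic that goes through httpx; that is consistent with pytchat's async fetch path moving from aiohttp to httpx.AsyncClient, although this commit only shows the test side. For reference, a sketch of that async client shape (the URL is a placeholder, and this is not pytchat code):

    import asyncio
    import httpx

    async def fetch(url: str) -> str:
        # async counterpart of httpx.Client(http2=True).get(...)
        async with httpx.AsyncClient(http2=True) as client:
            resp = await client.get(url)
            return resp.text

    # asyncio.run(fetch("https://www.youtube.com/live_chat?v=<video_id>&is_popout=1"))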
@@ -1,6 +1,6 @@
import asyncio
import re
-from aioresponses import aioresponses
import json
+from pytest_httpx import HTTPXMock
from concurrent.futures import CancelledError
from pytchat.core_multithread.livechat import LiveChat
from pytchat.core_async.livechat import LiveChatAsync
@@ -12,18 +12,18 @@ def _open_file(path):
        return f.read()


-@aioresponses()
-def test_async_live_stream(*mock):
+def add_response_file(httpx_mock: HTTPXMock, jsonfile_path: str):
+    testdata = json.loads(_open_file(jsonfile_path))
+    httpx_mock.add_response(json=testdata)

-    async def test_loop(*mock):
-        pattern = re.compile(
-            r'^https://www.youtube.com/live_chat/get_live_chat\?continuation=.*$')
-        _text = _open_file('tests/testdata/test_stream.json')
-        mock[0].get(pattern, status=200, body=_text)

+def test_async_live_stream(httpx_mock: HTTPXMock):
+    add_response_file(httpx_mock, 'tests/testdata/test_stream.json')
+
+    async def test_loop():
        chat = LiveChatAsync(video_id='__test_id__', processor=DummyProcessor())
        chats = await chat.get()
        rawdata = chats[0]["chatdata"]
        # assert fetching livachat data
        assert list(rawdata[0]["addChatItemAction"]["item"].keys())[
            0] == "liveChatTextMessageRenderer"
        assert list(rawdata[1]["addChatItemAction"]["item"].keys())[
@@ -41,25 +41,16 @@ def test_async_live_stream(*mock):

    loop = asyncio.get_event_loop()
    try:
-        loop.run_until_complete(test_loop(*mock))
+        loop.run_until_complete(test_loop())
    except CancelledError:
        assert True


-@aioresponses()
-def test_async_replay_stream(*mock):
-
-    async def test_loop(*mock):
-        pattern_live = re.compile(
-            r'^https://www.youtube.com/live_chat/get_live_chat\?continuation=.*$')
-        pattern_replay = re.compile(
-            r'^https://www.youtube.com/live_chat_replay/get_live_chat_replay\?continuation=.*$')
-        # empty livechat -> switch to fetch replaychat
-        _text_live = _open_file('tests/testdata/finished_live.json')
-        _text_replay = _open_file('tests/testdata/chatreplay.json')
-        mock[0].get(pattern_live, status=200, body=_text_live)
-        mock[0].get(pattern_replay, status=200, body=_text_replay)
+def test_async_replay_stream(httpx_mock: HTTPXMock):
+    add_response_file(httpx_mock, 'tests/testdata/finished_live.json')
+    add_response_file(httpx_mock, 'tests/testdata/chatreplay.json')
+
+    async def test_loop():
        chat = LiveChatAsync(video_id='__test_id__', processor=DummyProcessor())
        chats = await chat.get()
        rawdata = chats[0]["chatdata"]
@@ -71,27 +62,16 @@ def test_async_replay_stream(*mock):

    loop = asyncio.get_event_loop()
    try:
-        loop.run_until_complete(test_loop(*mock))
+        loop.run_until_complete(test_loop())
    except CancelledError:
        assert True


-@aioresponses()
-def test_async_force_replay(*mock):
+def test_async_force_replay(httpx_mock: HTTPXMock):
+    add_response_file(httpx_mock, 'tests/testdata/test_stream.json')
+    add_response_file(httpx_mock, 'tests/testdata/chatreplay.json')

-    async def test_loop(*mock):
-        pattern_live = re.compile(
-            r'^https://www.youtube.com/live_chat/get_live_chat\?continuation=.*$')
-        pattern_replay = re.compile(
-            r'^https://www.youtube.com/live_chat_replay/get_live_chat_replay\?continuation=.*$')
-        # valid live data, but force_replay = True
-        _text_live = _open_file('tests/testdata/test_stream.json')
-        # valid replay data
-        _text_replay = _open_file('tests/testdata/chatreplay.json')
-
-        mock[0].get(pattern_live, status=200, body=_text_live)
-        mock[0].get(pattern_replay, status=200, body=_text_replay)
+    async def test_loop():
        # force replay
        chat = LiveChatAsync(
            video_id='__test_id__', processor=DummyProcessor(), force_replay=True)
        chats = await chat.get()
@@ -105,20 +85,13 @@ def test_async_force_replay(*mock):

    loop = asyncio.get_event_loop()
    try:
-        loop.run_until_complete(test_loop(*mock))
+        loop.run_until_complete(test_loop())
    except CancelledError:
        assert True


-def test_multithread_live_stream(mocker):
-
-    _text = _open_file('tests/testdata/test_stream.json')
-    responseMock = mocker.Mock()
-    responseMock.status_code = 200
-    responseMock.text = _text
-    mocker.patch(
-        'requests.Session.get').return_value.__enter__.return_value = responseMock
-
+def test_multithread_live_stream(httpx_mock: HTTPXMock):
+    add_response_file(httpx_mock, 'tests/testdata/test_stream.json')
    chat = LiveChat(video_id='__test_id__', processor=DummyProcessor())
    chats = chat.get()
    rawdata = chats[0]["chatdata"]
@@ -1,21 +1,18 @@
from pytchat.parser.live import Parser
import json
-from aioresponses import aioresponses
from pytchat.exceptions import NoContents


-parser = Parser(is_replay=False)
-
-
def _open_file(path):
    with open(path, mode='r', encoding='utf-8') as f:
        return f.read()


+parser = Parser(is_replay=False)


-@aioresponses()
def test_finishedlive(*mock):
    '''Can a video whose live stream has already ended be handled correctly?'''

    _text = _open_file('tests/testdata/finished_live.json')
    _text = json.loads(_text)
@@ -26,10 +23,8 @@ def test_finishedlive(*mock):
        assert True


-@aioresponses()
def test_parsejson(*mock):
    '''Can the JSON be parsed correctly?'''

    _text = _open_file('tests/testdata/paramgen_firstread.json')
    _text = json.loads(_text)
@@ -12,13 +12,13 @@ def _set_test_data(filepath, mocker):
    response_mock = mocker.Mock()
    response_mock.status_code = 200
    response_mock.text = _text
-    mocker.patch('requests.get').return_value = response_mock
+    mocker.patch('httpx.get').return_value = response_mock


def test_archived_page(mocker):
    _set_test_data('tests/testdata/videoinfo/archived_page.txt', mocker)
    info = VideoInfo('__test_id__')
    actual_thumbnail_url = 'https://i.ytimg.com/vi/fzI9FNjXQ0o/hqdefault.jpg'
    assert info.video_id == '__test_id__'
    assert info.get_channel_name() == 'GitHub'
    assert info.get_thumbnail() == actual_thumbnail_url