Compare commits

93 Commits

SHA1: b7e6043a71, 820ba35013, ecd2d130bf, f77a2c889b, 47d5ab288f, 5f53fd24dd, 11a9d0e2d7, 480c9e15b8, 35aa7636f6, 8fee67c2d4, d3f1643a40, eb29f27493, 8adf75ab83, 2e05803d75, f16c0ee73a, a338f2b782, 864ccddfd7, 339df69e36, 76a5b0cd18, be0ab2431b, 2edb60c592, 2c6c3a1ca3, 4be540793d, 08b86fe596, 157f3b9952, 8f3ca2662a, c4b015861c, 3aa413d59e, 03ba285a16, 5fe0ee5aa8, 4e829a25d4, 15132a9bb8, 64ace9dad6, 9a2e96d3a0, a3695a59b8, bc8655ed62, 3bdc465740, 235d6b7212, 9f0754da57, 306b0a4564, 1c49387f1a, 300d96e56c, 0e301f48a8, a790ab13a9, 0456300d19, 2ef1e7028f, 9413c4a186, 8a8cef399f, 3bcad12cf6, 4eb18279fe, e9ed564e1b, 95f975c93d, 8012e1d191, f9480ea1eb, 404727c49c, 6b924a88ef, 56294d6a67, 283443e374, 89b51c420f, 96474f10c6, 5f78a99507, 78373bf45c, 3e11deed8f, 6daa375adf, 497d84015e, a90bda674d, 48543b7866, 5d3c7b5abd, 8df7062873, b788f692ad, 713215f1d7, f16ef60f11, 9bbdb6c4de, 2200abf204, 3ed0cb2c35, 5fa4d051ee, cd6d522055, aa8a4fb592, 92a01aa4d9, dbde072828, e3f9f95fb1, fa02116ab4, d8656161cd, 174d9f27c0, 0abf8dd9f0, 5ab653a1b2, 6e6bb8e019, ee4b696fc5, fd1d283caa, 85966186b5, 71341d2876, 8882c82f8b, 584b9c5591
LICENSE (new file, 21 lines)

@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2020 taizan-hokuto
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
README.md (10 changed lines)

@@ -7,7 +7,7 @@ pytchat is a python library for fetching youtube live chat.
 pytchat is a python library for fetching youtube live chat
 without using youtube api, Selenium or BeautifulSoup.
 
-pytchatはAPIを使わずにYouTubeチャットを取得するためのpythonライブラリです。
+pytchatは、YouTubeチャットを閲覧するためのpythonライブラリです。
 
 Other features:
 + Customizable [chat data processors](https://github.com/taizan-hokuto/pytchat/wiki/ChatProcessor) including youtube api compatible one.
@@ -30,10 +30,9 @@ One-liner command.
 Save chat data to html, with embedded custom emojis.
 
 ```bash
-$ pytchat -v ZJ6Q4U_Vg6s -o "c:/temp/"
-
+$ pytchat -v https://www.youtube.com/watch?v=ZJ6Q4U_Vg6s -o "c:/temp/"
 # options:
-# -v : video_id
+# -v : Video ID or URL that includes ID
 # -o : output directory (default path: './')
 # saved filename is [video_id].html
 ```
@@ -43,7 +42,8 @@ $ pytchat -v ZJ6Q4U_Vg6s -o "c:/temp/"
 ```python
 from pytchat import LiveChat
 livechat = LiveChat(video_id = "Zvp1pJpie4I")
+# It is also possible to specify a URL that includes the video ID:
+# livechat = LiveChat("https://www.youtube.com/watch?v=Zvp1pJpie4I")
 while livechat.is_alive():
     try:
         chatdata = livechat.get()
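The README snippet ends at `livechat.get()`. A minimal sketch of a complete polling loop that follows this change (URL accepted in place of a bare ID); the `items`/`tick()` iteration and the `message` attribute are assumed from the project README rather than shown in this diff, while `datetime`, `author.name` and `amountString` are the same fields HTMLArchiver uses later in this changeset:

```python
from pytchat import LiveChat

# A full watch URL works here after this change; a bare video ID still does too.
livechat = LiveChat("https://www.youtube.com/watch?v=Zvp1pJpie4I")
while livechat.is_alive():
    try:
        chatdata = livechat.get()
        for c in chatdata.items:
            # Field names mirror those consumed by HTMLArchiver in this diff.
            print(f"{c.datetime} [{c.author.name}] {c.message} {c.amountString}")
            chatdata.tick()  # paces output to playback speed (per project README)
    except KeyboardInterrupt:
        livechat.terminate()
        break
```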

@@ -2,7 +2,7 @@
 pytchat is a lightweight python library to browse youtube livechat without Selenium or BeautifulSoup.
 """
 __copyright__ = 'Copyright (C) 2019 taizan-hokuto'
-__version__ = '0.0.9'
+__version__ = '0.2.4'
 __license__ = 'MIT'
 __author__ = 'taizan-hokuto'
 __author_email__ = '55448286+taizan-hokuto@users.noreply.github.com'

@@ -1,10 +1,18 @@
 import argparse
+
+import os
+import signal
+import time
+from json.decoder import JSONDecodeError
 from pathlib import Path
 from .arguments import Arguments
-from .. exceptions import InvalidVideoIdException, NoContents
+from .progressbar import ProgressBar
+from .. exceptions import InvalidVideoIdException, NoContents, PatternUnmatchError
 from .. processors.html_archiver import HTMLArchiver
 from .. tool.extract.extractor import Extractor
 from .. tool.videoinfo import VideoInfo
+from .. util.extract_video_id import extract_video_id
+from .. import util
 from .. import __version__
 
 '''
@@ -19,42 +27,95 @@ https://github.com/PetterKraabol/Twitch-Chat-Downloader
 def main():
     # Arguments
     parser = argparse.ArgumentParser(description=f'pytchat v{__version__}')
-    parser.add_argument('-v', f'--{Arguments.Name.VIDEO}', type=str,
-                        help='Video IDs separated by commas without space.\n'
+    parser.add_argument('-v', f'--{Arguments.Name.VIDEO_IDS}', type=str,
+                        help='Video ID (or URL that includes Video ID). You can specify multiple video IDs by '
+                        'separating them with commas without spaces.\n'
                         'If ID starts with a hyphen (-), enclose the ID in square brackets.')
     parser.add_argument('-o', f'--{Arguments.Name.OUTPUT}', type=str,
                         help='Output directory (end with "/"). default="./"', default='./')
+    parser.add_argument(f'--{Arguments.Name.SAVE_ERROR_DATA}', action='store_true',
+                        help='Save error data when error occurs(".dat" file)')
     parser.add_argument(f'--{Arguments.Name.VERSION}', action='store_true',
-                        help='Settings version')
+                        help='Show version')
     Arguments(parser.parse_args().__dict__)
 
     if Arguments().print_version:
-        print(f'pytchat v{__version__}')
+        print(f'pytchat v{__version__} © 2019 taizan-hokuto')
         return
 
     # Extractor
-    if Arguments().video_ids:
-        for video_id in Arguments().video_ids:
-            if '[' in video_id:
-                video_id = video_id.replace('[', '').replace(']', '')
-            try:
-                info = VideoInfo(video_id)
-                print(f"Extracting...\n"
-                      f" video_id: {video_id}\n"
-                      f" channel:  {info.get_channel_name()}\n"
-                      f" title:    {info.get_title()}")
-                path = Path(Arguments().output + video_id + '.html')
-                print(f"output path: {path.resolve()}")
-                Extractor(video_id,
-                          processor=HTMLArchiver(
-                              Arguments().output + video_id + '.html'),
-                          callback=_disp_progress
-                          ).extract()
-                print("\nExtraction end.\n")
-            except (InvalidVideoIdException, NoContents) as e:
-                print(e)
+    if not Arguments().video_ids:
+        parser.print_help()
         return
-    parser.print_help()
+    for counter, video_id in enumerate(Arguments().video_ids):
+        if '[' in video_id:
+            video_id = video_id.replace('[', '').replace(']', '')
+        try:
+            video_id = extract_video_id(video_id)
+            if os.path.exists(Arguments().output):
+                path = Path(Arguments().output + video_id + '.html')
+            else:
+                raise FileNotFoundError
+            err = None
+            for _ in range(3):  # retry 3 times
+                try:
+                    info = VideoInfo(video_id)
+                    break
+                except (PatternUnmatchError, JSONDecodeError, InvalidVideoIdException) as e:
+                    err = e
+                    time.sleep(2)
+                    continue
+            else:
+                print("Cannot parse video information.:{}".format(video_id))
+                if Arguments().save_error_data:
+                    util.save(err.doc, "ERR", ".dat")
+                continue
+
+            if len(Arguments().video_ids) > 1:
+                print(f"\n{'-' * 10} video:{counter + 1} of {len(Arguments().video_ids)} {'-' * 10}")
+            print(f"\n"
+                  f" video_id: {video_id}\n"
+                  f" channel:  {info.get_channel_name()}\n"
+                  f" title:    {info.get_title()}")
+
+            print(f" output path: {path.resolve()}")
+            duration = info.get_duration()
+            pbar = ProgressBar(total=(duration * 1000), status="Extracting")
+            ex = Extractor(video_id,
+                           callback=pbar._disp,
+                           div=10)
+            signal.signal(signal.SIGINT, (lambda a, b: cancel(ex, pbar)))
+            data = ex.extract()
+            if data == []:
+                return False
+            pbar.reset("#", "=", total=len(data), status="Rendering ")
+            processor = HTMLArchiver(Arguments().output + video_id + '.html', callback=pbar._disp)
+            processor.process(
+                [{'video_id': None,
+                  'timeout': 1,
+                  'chatdata': (action["replayChatItemAction"]["actions"][0] for action in data)}]
+            )
+            processor.finalize()
+            pbar.reset('#', '#', status='Completed ')
+            pbar.close()
+            print()
+            if pbar.is_cancelled():
+                print("\nThe extraction process has been discontinued.\n")
+        except InvalidVideoIdException:
+            print("Invalid Video ID or URL:", video_id)
+        except NoContents as e:
+            print(e)
+        except FileNotFoundError:
+            print("The specified directory does not exist.:{}".format(Arguments().output))
+        except JSONDecodeError as e:
+            print(e.msg)
+            print("JSONDecodeError.:{}".format(video_id))
+            if Arguments().save_error_data:
+                util.save(e.doc, "ERR_JSON_DECODE", ".dat")
+
+    return
 
 
-def _disp_progress(a, b):
-    print('.', end="", flush=True)
+def cancel(ex, pbar):
+    ex.cancel()
+    pbar.cancel()
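The rewritten `main()` leans on Python's `for … else` construct to retry `VideoInfo` up to three times and to fall into the error branch only when every attempt fails. A self-contained sketch of that pattern, with a stand-in `fetch()` in place of `VideoInfo` (purely illustrative):

```python
import time


def fetch(attempt):
    # Stand-in for VideoInfo(video_id): fails twice, then succeeds.
    if attempt < 2:
        raise RuntimeError("transient parse error")
    return {"title": "example"}


err = None
for attempt in range(3):          # retry 3 times, as in the CLI
    try:
        info = fetch(attempt)
        break                     # success skips the else block
    except RuntimeError as e:
        err = e
        time.sleep(0.1)
        continue
else:
    # Runs only if the loop never hit break, i.e. all retries failed.
    print(f"giving up: {err}")
    raise SystemExit(1)

print(info["title"])
```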

@@ -16,8 +16,9 @@ class Arguments(metaclass=Singleton):
 
     class Name:
         VERSION: str = 'version'
-        OUTPUT: str = 'output'
-        VIDEO: str = 'video'
+        OUTPUT: str = 'output_dir'
+        VIDEO_IDS: str = 'video_id'
+        SAVE_ERROR_DATA: bool = 'save_error_data'
 
     def __init__(self,
                  arguments: Optional[Dict[str, Union[str, bool, int]]] = None):
@@ -34,7 +35,9 @@ class Arguments(metaclass=Singleton):
         self.print_version: bool = arguments[Arguments.Name.VERSION]
         self.output: str = arguments[Arguments.Name.OUTPUT]
         self.video_ids: List[int] = []
+        self.save_error_data: bool = arguments[Arguments.Name.SAVE_ERROR_DATA]
 
         # Videos
-        if arguments[Arguments.Name.VIDEO]:
+        if arguments[Arguments.Name.VIDEO_IDS]:
             self.video_ids = [video_id
-                              for video_id in arguments[Arguments.Name.VIDEO].split(',')]
+                              for video_id in arguments[Arguments.Name.VIDEO_IDS].split(',')]
pytchat/cli/progressbar.py (new file, 56 lines)

@@ -0,0 +1,56 @@
+'''
+This code for this progress bar is based on
+vladignatyev/progress.py
+https://gist.github.com/vladignatyev/06860ec2040cb497f0f3
+(MIT License)
+'''
+import shutil
+import sys
+
+
+class ProgressBar:
+    def __init__(self, total, status):
+        self._bar_len = 60
+        self._cancelled = False
+        self.reset(total=total, status=status)
+        self._blinker = 0
+
+    def reset(self, symbol_done="=", symbol_space=" ", total=100, status=''):
+        self.con_width = shutil.get_terminal_size(fallback=(80, 24)).columns
+        self._symbol_done = symbol_done
+        self._symbol_space = symbol_space
+        self._total = total
+        self._status = status
+        self._count = 0
+
+    def _disp(self, _, fetched):
+        self._progress(fetched, self._total)
+
+    def _progress(self, fillin, total):
+        if total == 0 or self._cancelled:
+            return
+        self._count += fillin
+        filled_len = int(round(self._bar_len * self._count / float(total)))
+        percents = round(100.0 * self._count / float(total), 1)
+        if percents > 100:
+            percents = 100.0
+        if filled_len > self._bar_len:
+            filled_len = self._bar_len
+
+        bar = self._symbol_done * filled_len + \
+            self._symbol_space * (self._bar_len - filled_len)
+        disp = f" [{bar}] {percents:>5.1f}% ...{self._status} "[:self.con_width - 1] + '\r'
+
+        sys.stdout.write(disp)
+        sys.stdout.flush()
+        self._blinker += 1
+
+    def close(self):
+        if not self._cancelled:
+            self._progress(self._total, self._total)
+
+    def cancel(self):
+        self._cancelled = True
+
+    def is_cancelled(self):
+        return self._cancelled
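A minimal sketch of how the CLI drives this class (assuming the file is importable as `pytchat.cli.progressbar`): one bar instance is reused for both phases via `reset()`, and `_disp(_, fetched)` is the `callback` the extractor calls with the amount of newly fetched data.

```python
import time
from pytchat.cli.progressbar import ProgressBar  # path added in this changeset

duration_ms = 10 * 1000
pbar = ProgressBar(total=duration_ms, status="Extracting")

# The extractor would call pbar._disp(actions, fetched_ms) as chunks arrive.
for _ in range(10):
    pbar._disp(None, 1000)   # first argument is ignored by _disp
    time.sleep(0.05)

# Second phase: reuse the same bar with new symbols and a new total.
pbar.reset("#", "=", total=5, status="Rendering ")
for _ in range(5):
    pbar._disp(None, 1)
pbar.close()
print()
```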

@@ -1,7 +1,8 @@
 import logging
 from . import mylogger
 headers = {
-    'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/83.0.4103.106 Safari/537.36'}
+    'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/84.0.4147.135 Safari/537.36',
+}
 
 
 def logger(module_name: str, loglevel=None):

@@ -1,13 +1,13 @@
-import aiohttp
 import asyncio
+import httpx
 import json
 import signal
 import time
 import traceback
 import urllib.parse
-from aiohttp.client_exceptions import ClientConnectorError
-from concurrent.futures import CancelledError
 from asyncio import Queue
+from concurrent.futures import CancelledError
 from .buffer import Buffer
 from ..parser.live import Parser
 from .. import config
@@ -15,13 +15,14 @@ from .. import exceptions
 from ..paramgen import liveparam, arcparam
 from ..processors.default.processor import DefaultProcessor
 from ..processors.combinator import Combinator
+from ..util.extract_video_id import extract_video_id
 
 headers = config.headers
 MAX_RETRY = 10
 
 
 class LiveChatAsync:
-    '''asyncio(aiohttp)を利用してYouTubeのライブ配信のチャットデータを取得する。
+    '''asyncioを利用してYouTubeのライブ配信のチャットデータを取得する。
 
     Parameter
     ---------
@@ -86,7 +87,7 @@ class LiveChatAsync:
                  topchat_only=False,
                  logger=config.logger(__name__),
                  ):
-        self._video_id = video_id
+        self._video_id = extract_video_id(video_id)
         self.seektime = seektime
         if isinstance(processor, tuple):
             self.processor = Combinator(processor)
@@ -160,11 +161,11 @@ class LiveChatAsync:
             parameter for next chat data
         '''
         try:
-            async with aiohttp.ClientSession() as session:
+            async with httpx.AsyncClient(http2=True) as client:
                 while(continuation and self._is_alive):
                     continuation = await self._check_pause(continuation)
                     contents = await self._get_contents(
-                        continuation, session, headers)
+                        continuation, client, headers)
                     metadata, chatdata = self._parser.parse(contents)
 
                     timeout = metadata['timeoutMs'] / 1000
@@ -209,7 +210,7 @@ class LiveChatAsync:
                 self._video_id, 3, self._topchat_only)
             return continuation
 
-    async def _get_contents(self, continuation, session, headers):
+    async def _get_contents(self, continuation, client, headers):
         '''Get 'continuationContents' from livechat json.
            If contents is None at first fetching,
            try to fetch archive chat data.
@@ -218,7 +219,7 @@ class LiveChatAsync:
         -------
         'continuationContents' which includes metadata & chatdata.
         '''
-        livechat_json = await self._get_livechat_json(continuation, session, headers)
+        livechat_json = await self._get_livechat_json(continuation, client, headers)
         contents = self._parser.get_contents(livechat_json)
         if self._first_fetch:
             if contents is None or self._is_replay:
@@ -228,18 +229,18 @@ class LiveChatAsync:
                 continuation = arcparam.getparam(
                     self._video_id, self.seektime, self._topchat_only)
                 livechat_json = (await self._get_livechat_json(
-                    continuation, session, headers))
+                    continuation, client, headers))
                 reload_continuation = self._parser.reload_continuation(
                     self._parser.get_contents(livechat_json))
                 if reload_continuation:
                     livechat_json = (await self._get_livechat_json(
-                        reload_continuation, session, headers))
+                        reload_continuation, client, headers))
                     contents = self._parser.get_contents(livechat_json)
                     self._is_replay = True
             self._first_fetch = False
         return contents
 
-    async def _get_livechat_json(self, continuation, session, headers):
+    async def _get_livechat_json(self, continuation, client, headers):
         '''
         Get json which includes chat data.
         '''
@@ -248,14 +249,13 @@ class LiveChatAsync:
         status_code = 0
         url = f"https://www.youtube.com/{self._fetch_url}{continuation}&pbj=1"
         for _ in range(MAX_RETRY + 1):
-            async with session.get(url, headers=headers) as resp:
-                try:
-                    text = await resp.text()
-                    livechat_json = json.loads(text)
-                    break
-                except (ClientConnectorError, json.JSONDecodeError):
-                    await asyncio.sleep(1)
-                    continue
+            try:
+                resp = await client.get(url, headers=headers)
+                livechat_json = resp.json()
+                break
+            except (httpx.HTTPError, json.JSONDecodeError):
+                await asyncio.sleep(1)
+                continue
         else:
             self._logger.error(f"[{self._video_id}]"
                                f"Exceeded retry count. status_code={status_code}")
@@ -333,12 +333,12 @@ class LiveChatAsync:
         '''
         if self.is_alive():
             self.terminate()
        try:
            self.listen_task.result()
        except Exception as e:
            self.exception = e
            if not isinstance(e, exceptions.ChatParseException):
                self._logger.error(f'Internal exception - {type(e)}{str(e)}')
        self._logger.info(f'[{self._video_id}]終了しました')
 
     def raise_for_status(self):
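The aiohttp-to-httpx switch above boils down to the pattern sketched below: one `AsyncClient` reused across requests, `resp.json()` as a plain (non-awaited) call, and `httpx.HTTPError` replacing aiohttp's connector errors in the retry loop. This is a standalone sketch, not the library's code; the URL is a placeholder endpoint, and `http2=True` additionally requires the optional `h2` package to be installed.

```python
import asyncio
import json

import httpx

MAX_RETRY = 10


async def fetch_json(url: str) -> dict:
    async with httpx.AsyncClient(http2=True) as client:
        for _ in range(MAX_RETRY + 1):
            try:
                resp = await client.get(url)   # awaited, like session.get()
                return resp.json()             # sync call; aiohttp needed `await resp.text()`
            except (httpx.HTTPError, json.JSONDecodeError):
                await asyncio.sleep(1)
        raise RuntimeError("Exceeded retry count")


if __name__ == "__main__":
    data = asyncio.run(fetch_json("https://httpbin.org/json"))  # placeholder endpoint
    print(sorted(data.keys()))
```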

@@ -1,4 +1,4 @@
-import requests
+import httpx
 import json
 import signal
 import time
@@ -14,6 +14,7 @@ from .. import exceptions
 from ..paramgen import liveparam, arcparam
 from ..processors.default.processor import DefaultProcessor
 from ..processors.combinator import Combinator
+from ..util.extract_video_id import extract_video_id
 
 headers = config.headers
 MAX_RETRY = 10
@@ -84,7 +85,7 @@ class LiveChat:
                  topchat_only=False,
                  logger=config.logger(__name__)
                  ):
-        self._video_id = video_id
+        self._video_id = extract_video_id(video_id)
         self.seektime = seektime
         if isinstance(processor, tuple):
             self.processor = Combinator(processor)
@@ -152,10 +153,10 @@ class LiveChat:
             parameter for next chat data
         '''
         try:
-            with requests.Session() as session:
+            with httpx.Client(http2=True) as client:
                 while(continuation and self._is_alive):
                     continuation = self._check_pause(continuation)
-                    contents = self._get_contents(continuation, session, headers)
+                    contents = self._get_contents(continuation, client, headers)
                     metadata, chatdata = self._parser.parse(contents)
                     timeout = metadata['timeoutMs'] / 1000
                     chat_component = {
@@ -198,7 +199,7 @@ class LiveChat:
             continuation = liveparam.getparam(self._video_id, 3)
             return continuation
 
-    def _get_contents(self, continuation, session, headers):
+    def _get_contents(self, continuation, client, headers):
         '''Get 'continuationContents' from livechat json.
            If contents is None at first fetching,
            try to fetch archive chat data.
@@ -208,7 +209,7 @@ class LiveChat:
         'continuationContents' which includes metadata & chat data.
         '''
         livechat_json = (
-            self._get_livechat_json(continuation, session, headers)
+            self._get_livechat_json(continuation, client, headers)
         )
         contents = self._parser.get_contents(livechat_json)
         if self._first_fetch:
@@ -218,18 +219,18 @@ class LiveChat:
                 self._fetch_url = "live_chat_replay/get_live_chat_replay?continuation="
                 continuation = arcparam.getparam(
                     self._video_id, self.seektime, self._topchat_only)
-                livechat_json = (self._get_livechat_json(continuation, session, headers))
+                livechat_json = (self._get_livechat_json(continuation, client, headers))
                 reload_continuation = self._parser.reload_continuation(
                     self._parser.get_contents(livechat_json))
                 if reload_continuation:
                     livechat_json = (self._get_livechat_json(
-                        reload_continuation, session, headers))
+                        reload_continuation, client, headers))
                     contents = self._parser.get_contents(livechat_json)
                     self._is_replay = True
             self._first_fetch = False
         return contents
 
-    def _get_livechat_json(self, continuation, session, headers):
+    def _get_livechat_json(self, continuation, client, headers):
         '''
         Get json which includes chat data.
         '''
@@ -238,10 +239,9 @@ class LiveChat:
         status_code = 0
         url = f"https://www.youtube.com/{self._fetch_url}{continuation}&pbj=1"
         for _ in range(MAX_RETRY + 1):
-            with session.get(url, headers=headers) as resp:
+            with client:
                 try:
-                    text = resp.text
-                    livechat_json = json.loads(text)
+                    livechat_json = client.get(url, headers=headers).json()
                     break
                 except json.JSONDecodeError:
                     time.sleep(1)
@@ -324,12 +324,12 @@ class LiveChat:
         '''
         if self.is_alive():
             self.terminate()
        try:
            self.listen_task.result()
        except Exception as e:
            self.exception = e
            if not isinstance(e, exceptions.ChatParseException):
                self._logger.error(f'Internal exception - {type(e)}{str(e)}')
        self._logger.info(f'[{self._video_id}]終了しました')
 
     def raise_for_status(self):

@@ -38,7 +38,9 @@ class InvalidVideoIdException(Exception):
     '''
     Thrown when the video_id is not exist (VideoInfo).
     '''
-    pass
+    def __init__(self, doc):
+        self.msg = "InvalidVideoIdException"
+        self.doc = doc
 
 
 class UnknownConnectionError(Exception):
@@ -47,7 +49,7 @@ class UnknownConnectionError(Exception):
 
 class RetryExceedMaxCount(Exception):
     '''
-    thrown when the number of retries exceeds the maximum value.
+    Thrown when the number of retries exceeds the maximum value.
     '''
     pass
 
@@ -62,3 +64,18 @@ class ReceivedUnknownContinuation(ChatParseException):
 
 class FailedExtractContinuation(ChatDataFinished):
     pass
+
+
+class VideoInfoParseError(Exception):
+    '''
+    Base exception when parsing video info.
+    '''
+
+
+class PatternUnmatchError(VideoInfoParseError):
+    '''
+    Thrown when failed to parse video info with unmatched pattern.
+    '''
+    def __init__(self, doc):
+        self.msg = "PatternUnmatchError"
+        self.doc = doc
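Both `InvalidVideoIdException` and the new `PatternUnmatchError` now carry the raw document that failed to parse, which is what lets the CLI's `--save_error_data` flag dump a `.dat` file. A hedged sketch of that flow, using a local save helper instead of the project's `util.save` (the parser here is purely illustrative):

```python
class VideoInfoParseError(Exception):
    '''Base exception when parsing video info.'''


class PatternUnmatchError(VideoInfoParseError):
    '''Thrown when failed to parse video info with unmatched pattern.'''
    def __init__(self, doc):
        self.msg = "PatternUnmatchError"
        self.doc = doc


def parse_video_info(html: str) -> dict:
    # Illustrative parser: pretend the expected pattern is missing.
    raise PatternUnmatchError(doc=html)


def save_error_data(doc: str, prefix: str, ext: str = ".dat") -> str:
    # Local stand-in for the util.save(err.doc, "ERR", ".dat") call in the CLI.
    path = f"{prefix}{ext}"
    with open(path, "w", encoding="utf-8") as f:
        f.write(doc)
    return path


try:
    parse_video_info("<html>unexpected markup</html>")
except PatternUnmatchError as err:
    print(err.msg)
    print("saved:", save_error_data(err.doc, "ERR"))
```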

@@ -12,4 +12,4 @@ class LiveChatLegacyPaidMessageRenderer(BaseRenderer):
     def get_message(self, renderer):
         message = (renderer["eventText"]["runs"][0]["text"]
                    ) + ' / ' + (renderer["detailText"]["simpleText"])
-        return message
+        return message, [message]

@@ -4,6 +4,10 @@ from .base import BaseRenderer
 superchat_regex = re.compile(r"^(\D*)(\d{1,3}(,\d{3})*(\.\d*)*\b)$")
 
 
+class Colors:
+    pass
+
+
 class LiveChatPaidMessageRenderer(BaseRenderer):
     def __init__(self, item):
         super().__init__(item, "superChat")
@@ -18,6 +22,7 @@ class LiveChatPaidMessageRenderer(BaseRenderer):
         self.currency = currency.symbols[symbol]["fxtext"] if currency.symbols.get(
             symbol) else symbol
         self.bgColor = self.renderer.get("bodyBackgroundColor", 0)
+        self.colors = self.get_colors()
 
     def get_amountdata(self, renderer):
         amountDisplayString = renderer["purchaseAmountText"]["simpleText"]
@@ -29,3 +34,13 @@ class LiveChatPaidMessageRenderer(BaseRenderer):
             symbol = ""
             amount = 0.0
         return amountDisplayString, symbol, amount
+
+    def get_colors(self):
+        colors = Colors()
+        colors.headerBackgroundColor = self.renderer.get("headerBackgroundColor", 0)
+        colors.headerTextColor = self.renderer.get("headerTextColor", 0)
+        colors.bodyBackgroundColor = self.renderer.get("bodyBackgroundColor", 0)
+        colors.bodyTextColor = self.renderer.get("bodyTextColor", 0)
+        colors.timestampColor = self.renderer.get("timestampColor", 0)
+        colors.authorNameTextColor = self.renderer.get("authorNameTextColor", 0)
+        return colors
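The new `Colors` container simply mirrors whatever integer values YouTube puts in the renderer. A short sketch of reading them, assuming (as the stored defaults of `0` suggest) that each field is a packed 32-bit ARGB integer that can be rendered as a hex string; the sample values are illustrative only:

```python
class Colors:
    pass


def get_colors(renderer: dict) -> Colors:
    # Same kind of fields the renderer subclasses collect in this changeset.
    colors = Colors()
    colors.headerBackgroundColor = renderer.get("headerBackgroundColor", 0)
    colors.bodyBackgroundColor = renderer.get("bodyBackgroundColor", 0)
    colors.bodyTextColor = renderer.get("bodyTextColor", 0)
    return colors


def argb_to_hex(value: int) -> str:
    # Assumption: values are packed 0xAARRGGBB integers; keep the RGB part.
    return f"#{value & 0xFFFFFF:06X}"


sample = {"headerBackgroundColor": 4294947584, "bodyBackgroundColor": 4294953512}
c = get_colors(sample)
print(argb_to_hex(c.headerBackgroundColor), argb_to_hex(c.bodyBackgroundColor))
```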

@@ -4,6 +4,10 @@ from .base import BaseRenderer
 superchat_regex = re.compile(r"^(\D*)(\d{1,3}(,\d{3})*(\.\d*)*\b)$")
 
 
+class Colors:
+    pass
+
+
 class LiveChatPaidStickerRenderer(BaseRenderer):
     def __init__(self, item):
         super().__init__(item, "superSticker")
@@ -17,9 +21,10 @@ class LiveChatPaidStickerRenderer(BaseRenderer):
         self.amountString = amountDisplayString
         self.currency = currency.symbols[symbol]["fxtext"] if currency.symbols.get(
             symbol) else symbol
-        self.bgColor = self.renderer.get("moneyChipBackgroundColor", 0)
-        self.sticker = "https:" + \
-            self.renderer["sticker"]["thumbnails"][0]["url"]
+        self.bgColor = self.renderer.get("backgroundColor", 0)
+        self.sticker = "".join(("https:",
+                                self.renderer["sticker"]["thumbnails"][0]["url"]))
+        self.colors = self.get_colors()
 
     def get_amountdata(self, renderer):
         amountDisplayString = renderer["purchaseAmountText"]["simpleText"]
@@ -31,3 +36,11 @@ class LiveChatPaidStickerRenderer(BaseRenderer):
             symbol = ""
             amount = 0.0
         return amountDisplayString, symbol, amount
+
+    def get_colors(self):
+        colors = Colors()
+        colors.moneyChipBackgroundColor = self.renderer.get("moneyChipBackgroundColor", 0)
+        colors.moneyChipTextColor = self.renderer.get("moneyChipTextColor", 0)
+        colors.backgroundColor = self.renderer.get("backgroundColor", 0)
+        colors.authorNameTextColor = self.renderer.get("authorNameTextColor", 0)
+        return colors

@@ -1,6 +1,6 @@
 import os
 import re
-import requests
+import httpx
 from base64 import standard_b64encode
 from .chat_processor import ChatProcessor
 from .default.processor import DefaultProcessor
@@ -12,9 +12,9 @@ fmt_headers = ['datetime', 'elapsed', 'authorName',
                'message', 'superchat', 'type', 'authorChannel']
 
 HEADER_HTML = '''
-<meta http-equiv="Content-Type" content="text/html;charset=UTF-8">
 <html>
 <head>
+<meta http-equiv="Content-Type" content="text/html;charset=UTF-8">
 '''
 
 TABLE_CSS = '''
@@ -43,20 +43,21 @@ class HTMLArchiver(ChatProcessor):
     '''
     HTMLArchiver saves chat data as HTML table format.
     '''
-    def __init__(self, save_path):
+    def __init__(self, save_path, callback=None):
         super().__init__()
         self.save_path = self._checkpath(save_path)
         self.processor = DefaultProcessor()
-        self.emoji_table = {}  # table for custom emojis. key: emoji_id, value: base64 encoded image binary.
+        self.emoji_table = {}  # tuble for custom emojis. key: emoji_id, value: base64 encoded image binary.
         self.header = [HEADER_HTML]
         self.body = ['<body>\n', '<table class="css">\n', self._parse_table_header(fmt_headers)]
+        self.callback = callback
 
     def _checkpath(self, filepath):
         splitter = os.path.splitext(os.path.basename(filepath))
         body = splitter[0]
         extention = splitter[1]
         newpath = filepath
-        counter = 0
+        counter = 1
         while os.path.exists(newpath):
             match = re.search(PATTERN, body)
             if match:
@@ -80,17 +81,20 @@ class HTMLArchiver(ChatProcessor):
         """
         if chat_components is None or len(chat_components) == 0:
             return
-        self.body.extend(
-            (self._parse_html_line((
-                c.datetime,
-                c.elapsedTime,
-                c.author.name,
-                self._parse_message(c.messageEx),
-                c.amountString,
-                c.author.type,
-                c.author.channelId)
-            ) for c in self.processor.process(chat_components).items)
-        )
+        for c in self.processor.process(chat_components).items:
+            self.body.extend(
+                self._parse_html_line((
+                    c.datetime,
+                    c.elapsedTime,
+                    c.author.name,
+                    self._parse_message(c.messageEx),
+                    c.amountString,
+                    c.author.type,
+                    c.author.channelId)
+                )
+            )
+            if self.callback:
+                self.callback(None, 1)
 
     def _parse_html_line(self, raw_line):
         return ''.join(('<tr>',
@@ -108,7 +112,7 @@ class HTMLArchiver(ChatProcessor):
                        for item in message_items)
 
     def _encode_img(self, url):
-        resp = requests.get(url)
+        resp = httpx.get(url)
         return standard_b64encode(resp.content).decode()
 
     def _set_emoji_table(self, item: dict):
@@ -131,7 +135,7 @@ class HTMLArchiver(ChatProcessor):
 
     def finalize(self):
         self.header.extend([self._create_styles(), '</head>\n'])
-        self.body.extend(['</table>\n</body>'])
+        self.body.extend(['</table>\n</body>\n</html>'])
         with open(self.save_path, mode='a', encoding='utf-8') as f:
             f.writelines(self.header)
             f.writelines(self.body)
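The archiver's `_encode_img` now fetches each custom emoji with httpx and stores it base64-encoded so the resulting HTML is self-contained. A standalone sketch of that idea; the image URL is a placeholder, and the `<img>` wrapper is an assumption for illustration rather than the archiver's exact markup:

```python
from base64 import standard_b64encode

import httpx


def encode_img(url: str) -> str:
    # Same approach as HTMLArchiver._encode_img: download, then base64-encode.
    resp = httpx.get(url)
    return standard_b64encode(resp.content).decode()


def embed_as_img_tag(url: str) -> str:
    # A data URI keeps the archived HTML viewable offline.
    return f'<img src="data:image/png;base64,{encode_img(url)}" height="24">'


if __name__ == "__main__":
    print(embed_as_img_tag("https://www.example.com/emoji.png")[:80], "...")
```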

@@ -1,6 +1,5 @@
-import aiohttp
+import httpx
 import asyncio
-import json
 from . import parser
 from . block import Block
 from . worker import ExtractWorker
@@ -17,6 +16,9 @@ REPLAY_URL = "https://www.youtube.com/live_chat_replay/" \
     "get_live_chat_replay?continuation="
 MAX_RETRY_COUNT = 3
 
+# Set to avoid duplicate parameters
+param_set = set()
+
 
 def _split(start, end, count, min_interval_sec=120):
     """
@@ -51,11 +53,12 @@ def _split(start, end, count, min_interval_sec=120):
 
 
 def ready_blocks(video_id, duration, div, callback):
+    param_set.clear()
     if div <= 0:
         raise ValueError
 
     async def _get_blocks(video_id, duration, div, callback):
-        async with aiohttp.ClientSession() as session:
+        async with httpx.AsyncClient(http2=True) as session:
             tasks = [_create_block(session, video_id, seektime, callback)
                      for seektime in _split(-1, duration, div)]
             return await asyncio.gather(*tasks)
@@ -65,9 +68,12 @@ def ready_blocks(video_id, duration, div, callback):
         url = f"{REPLAY_URL}{quote(continuation)}&pbj=1"
         for _ in range(MAX_RETRY_COUNT):
             try:
-                async with session.get(url, headers=headers) as resp:
-                    text = await resp.text()
-                    next_continuation, actions = parser.parse(json.loads(text))
+                if continuation in param_set:
+                    next_continuation, actions = None, []
+                    break
+                param_set.add(continuation)
+                resp = await session.get(url, headers=headers)
+                next_continuation, actions = parser.parse(resp.json())
                 break
             except JSONDecodeError:
                 await asyncio.sleep(3)
@@ -106,7 +112,7 @@ def fetch_patch(callback, blocks, video_id):
             )
             for block in blocks
         ]
-        async with aiohttp.ClientSession() as session:
+        async with httpx.AsyncClient() as session:
             tasks = [worker.run(session) for worker in workers]
             return await asyncio.gather(*tasks)
 
@@ -114,9 +120,12 @@ def fetch_patch(callback, blocks, video_id):
         url = f"{REPLAY_URL}{quote(continuation)}&pbj=1"
         for _ in range(MAX_RETRY_COUNT):
             try:
-                async with session.get(url, headers=config.headers) as resp:
-                    chat_json = await resp.text()
-                    continuation, actions = parser.parse(json.loads(chat_json))
+                if continuation in param_set:
+                    continuation, actions = None, []
+                    break
+                param_set.add(continuation)
+                resp = await session.get(url, headers=config.headers)
+                continuation, actions = parser.parse(resp.json())
                 break
             except JSONDecodeError:
                 await asyncio.sleep(3)
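The new module-level `param_set` is a plain `set` used to skip continuation parameters that have already been fetched, so overlapping blocks do not download the same chunk twice. A toy sketch of the guard, with fake continuation tokens instead of real API parameters:

```python
param_set = set()


def fetch_once(continuation: str):
    # Mirrors the guard added to _create_block/_fetch: skip already-seen params.
    if continuation in param_set:
        return None, []          # treated as "nothing new to fetch"
    param_set.add(continuation)
    return f"next-after-{continuation}", [f"chat-from-{continuation}"]


for token in ["abc", "def", "abc"]:   # "abc" arrives twice
    nxt, actions = fetch_once(token)
    print(token, "->", nxt, actions)
```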

@@ -3,6 +3,7 @@ from . import duplcheck
 from .. videoinfo import VideoInfo
 from ... import config
 from ... exceptions import InvalidVideoIdException
+from ... util.extract_video_id import extract_video_id
 
 logger = config.logger(__name__)
 headers = config.headers
@@ -14,7 +15,7 @@ class Extractor:
             raise ValueError('div must be positive integer.')
         elif div > 10:
             div = 10
-        self.video_id = video_id
+        self.video_id = extract_video_id(video_id)
         self.div = div
         self.callback = callback
         self.processor = processor
@@ -78,7 +79,7 @@ class Extractor:
 
     def extract(self):
         if self.duration == 0:
-            print("video is not archived.")
+            print("\nCannot extract chat data:\n The specified video has not yet been archived.")
             return []
         data = self._execute_extract_operations()
         if self.processor is None:

@@ -42,10 +42,14 @@ def get_offset(item):
 
 
 def get_id(item):
-    return list((list(item['replayChatItemAction']["actions"][0].values()
-                      )[0])['item'].values())[0].get('id')
+    a = list(item['replayChatItemAction']["actions"][0].values())[0].get('item')
+    if a:
+        return list(a.values())[0].get('id')
+    return None
 
 
 def get_type(item):
-    return list((list(item['replayChatItemAction']["actions"][0].values()
-                      )[0])['item'].keys())[0]
+    a = list(item['replayChatItemAction']["actions"][0].values())[0].get('item')
+    if a:
+        return list(a.keys())[0]
+    return None
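The rewritten helpers guard against replay actions whose payload has no `item` entry, returning `None` instead of raising `KeyError`. A small sketch with two hand-made actions showing the difference:

```python
def get_id(item):
    a = list(item['replayChatItemAction']["actions"][0].values())[0].get('item')
    if a:
        return list(a.values())[0].get('id')
    return None


with_item = {"replayChatItemAction": {"actions": [
    {"addChatItemAction": {"item": {"liveChatTextMessageRenderer": {"id": "abc123"}}}}]}}
without_item = {"replayChatItemAction": {"actions": [
    {"addChatItemAction": {"clientId": "xyz"}}]}}   # no 'item' key at all

print(get_id(with_item))     # -> abc123
print(get_id(without_item))  # -> None
```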

@@ -1,12 +1,12 @@
 from . block import Block
 from . patch import fill, split
 from ... paramgen import arcparam
+from typing import Tuple
 
 
 class ExtractWorker:
     """
     ExtractWorker associates a download session with a block.
 
     When the worker finishes fetching, the block
     being fetched is splitted and assigned the free worker.
@@ -76,7 +76,7 @@ def _search_new_block(worker) -> Block:
     return new_block
 
 
-def _get_undone_block(blocks) -> (int, Block):
+def _get_undone_block(blocks) -> Tuple[int, Block]:
     min_interval_ms = 120000
     max_remaining = 0
     undone_block = None

@@ -1,141 +0,0 @@ (file removed)
-
-import aiohttp
-import asyncio
-import json
-from . import parser
-from . block import Block
-from . worker import ExtractWorker
-from . patch import Patch
-from ... import config
-from ... paramgen import arcparam_mining as arcparam
-from concurrent.futures import CancelledError
-from urllib.parse import quote
-
-headers = config.headers
-REPLAY_URL = "https://www.youtube.com/live_chat_replay?continuation="
-INTERVAL = 1
-def _split(start, end, count, min_interval_sec = 120):
-    """
-    Split section from `start` to `end` into `count` pieces,
-    and returns the beginning of each piece.
-    The `count` is adjusted so that the length of each piece
-    is no smaller than `min_interval`.
-
-    Returns:
-    --------
-        List of the offset of each block's first chat data.
-    """
-
-    if not (isinstance(start,int) or isinstance(start,float)) or \
-        not (isinstance(end,int) or isinstance(end,float)):
-        raise ValueError("start/end must be int or float")
-    if not isinstance(count,int):
-        raise ValueError("count must be int")
-    if start>end:
-        raise ValueError("end must be equal to or greater than start.")
-    if count<1:
-        raise ValueError("count must be equal to or greater than 1.")
-    if (end-start)/count < min_interval_sec:
-        count = int((end-start)/min_interval_sec)
-        if count == 0 : count = 1
-    interval= (end-start)/count
-
-    if count == 1:
-        return [start]
-    return sorted( list(set( [int(start + interval*j)
-        for j in range(count) ])))
-
-def ready_blocks(video_id, duration, div, callback):
-    if div <= 0: raise ValueError
-
-    async def _get_blocks( video_id, duration, div, callback):
-        async with aiohttp.ClientSession() as session:
-            tasks = [_create_block(session, video_id, seektime, callback)
-                for seektime in _split(0, duration, div)]
-            return await asyncio.gather(*tasks)
-
-
-    async def _create_block(session, video_id, seektime, callback):
-        continuation = arcparam.getparam(video_id, seektime = seektime)
-        url=(f"{REPLAY_URL}{quote(continuation)}&playerOffsetMs="
-             f"{int(seektime*1000)}&hidden=false&pbj=1")
-        async with session.get(url, headers = headers) as resp:
-            chat_json = await resp.text()
-        if chat_json is None:
-            return
-        continuation, actions = parser.parse(json.loads(chat_json)[1])
-        first = seektime
-        seektime += INTERVAL
-        if callback:
-            callback(actions, INTERVAL)
-        return Block(
-            continuation = continuation,
-            chat_data = actions,
-            first = first,
-            last = seektime,
-            seektime = seektime
-        )
-    """
-    fetch initial blocks.
-    """
-    loop = asyncio.get_event_loop()
-    blocks = loop.run_until_complete(
-        _get_blocks(video_id, duration, div, callback))
-    return blocks
-
-def fetch_patch(callback, blocks, video_id):
-
-    async def _allocate_workers():
-        workers = [
-            ExtractWorker(
-                fetch = _fetch, block = block,
-                blocks = blocks, video_id = video_id
-            )
-            for block in blocks
-        ]
-        async with aiohttp.ClientSession() as session:
-            tasks = [worker.run(session) for worker in workers]
-            return await asyncio.gather(*tasks)
-
-    async def _fetch(seektime,session) -> Patch:
-        continuation = arcparam.getparam(video_id, seektime = seektime)
-        url=(f"{REPLAY_URL}{quote(continuation)}&playerOffsetMs="
-             f"{int(seektime*1000)}&hidden=false&pbj=1")
-        async with session.get(url,headers = config.headers) as resp:
-            chat_json = await resp.text()
-        actions = []
-        try:
-            if chat_json is None:
-                return Patch()
-            continuation, actions = parser.parse(json.loads(chat_json)[1])
-        except json.JSONDecodeError:
-            pass
-        if callback:
-            callback(actions, INTERVAL)
-        return Patch(chats = actions, continuation = continuation,
-            seektime = seektime, last = seektime)
-    """
-    allocate workers and assign blocks.
-    """
-    loop = asyncio.get_event_loop()
-    try:
-        loop.run_until_complete(_allocate_workers())
-    except CancelledError:
-        pass
-
-async def _shutdown():
-    print("\nshutdown...")
-    tasks = [t for t in asyncio.all_tasks()
-        if t is not asyncio.current_task()]
-    for task in tasks:
-        task.cancel()
-        try:
-            await task
-        except asyncio.CancelledError:
-            pass
-
-def cancel():
-    loop = asyncio.get_event_loop()
-    loop.create_task(_shutdown())

@@ -1,62 +0,0 @@ (file removed)
-from . import parser
-class Block:
-    """Block object represents something like a box
-    to join chunk of chatdata.
-
-    Parameter:
-    ---------
-    first : int :
-        videoOffsetTimeMs of the first chat_data
-        (chat_data[0])
-
-    last : int :
-        videoOffsetTimeMs of the last chat_data.
-        (chat_data[-1])
-
-        this value increases as fetching chatdata progresses.
-
-    end : int :
-        target videoOffsetTimeMs of last chat data for extract,
-        equals to first videoOffsetTimeMs of next block.
-        when extract worker reaches this offset, stop fetching.
-
-    continuation : str :
-        continuation param of last chat data.
-
-    chat_data : list
-
-    done : bool :
-        whether this block has been fetched.
-
-    remaining : int :
-        remaining data to extract.
-        equals end - last.
-
-    is_last : bool :
-        whether this block is the last one in blocklist.
-
-    during_split : bool :
-        whether this block is in the process of during_split.
-        while True, this block is excluded from duplicate split procedure.
-
-    seektime : float :
-        the last position of this block(seconds) already fetched.
-    """
-
-    __slots__ = ['first','last','end','continuation','chat_data','remaining',
-        'done','is_last','during_split','seektime']
-
-    def __init__(self, first = 0, last = 0, end = 0,
-                continuation = '', chat_data = [], is_last = False,
-                during_split = False, seektime = None):
-        self.first = first
-        self.last = last
-        self.end = end
-        self.continuation = continuation
-        self.chat_data = chat_data
-        self.done = False
-        self.remaining = self.end - self.last
-        self.is_last = is_last
-        self.during_split = during_split
-        self.seektime = seektime

@@ -1,73 +0,0 @@ (file removed)
-import re
-from ... import config
-from ... exceptions import (
-    ResponseContextError,
-    NoContents, NoContinuation)
-
-logger = config.logger(__name__)
-
-
-def parse(jsn):
-    """
-    Parse replay chat data.
-    Parameter:
-    ----------
-    jsn : dict
-        JSON of replay chat data.
-    Returns:
-    ------
-    continuation : str
-    actions : list
-
-    """
-    if jsn is None:
-        raise ValueError("parameter JSON is None")
-    if jsn['response']['responseContext'].get('errors'):
-        raise ResponseContextError(
-            'video_id is invalid or private/deleted.')
-    contents = jsn["response"].get('continuationContents')
-    if contents is None:
-        raise NoContents('No chat data.')
-
-    cont = contents['liveChatContinuation']['continuations'][0]
-    if cont is None:
-        raise NoContinuation('No Continuation')
-    metadata = cont.get('liveChatReplayContinuationData')
-    if metadata:
-        continuation = metadata.get("continuation")
-        actions = contents['liveChatContinuation'].get('actions')
-        if continuation:
-            return continuation, [action["replayChatItemAction"]["actions"][0]
-                for action in actions
-                if list(action['replayChatItemAction']["actions"][0].values()
-                )[0]['item'].get("liveChatPaidMessageRenderer")
-                or list(action['replayChatItemAction']["actions"][0].values()
-                )[0]['item'].get("liveChatPaidStickerRenderer")
-            ]
-    return None, []
-
-
-def get_offset(item):
-    return int(item['replayChatItemAction']["videoOffsetTimeMsec"])
-
-
-def get_id(item):
-    return list((list(item['replayChatItemAction']["actions"][0].values()
-    )[0])['item'].values())[0].get('id')
-
-
-def get_type(item):
-    return list((list(item['replayChatItemAction']["actions"][0].values()
-    )[0])['item'].keys())[0]
-
-
-_REGEX_YTINIT = re.compile(
-    "window\\[\"ytInitialData\"\\]\\s*=\\s*({.+?});\\s+")
-
-
-def extract(text):
-
-    match = re.findall(_REGEX_YTINIT, str(text))
-    if match:
-        return match[0]
-    return None
@@ -1,27 +0,0 @@
-from . import parser
-from . block import Block
-from typing import NamedTuple
-
-class Patch(NamedTuple):
-    """
-    Patch represents chunk of chat data
-    which is fetched by asyncdl.fetch_patch._fetch().
-    """
-    chats : list = []
-    continuation : str = None
-    seektime : float = None
-    first : int = None
-    last : int = None
-
-def fill(block:Block, patch:Patch):
-    if patch.last < block.end:
-        set_patch(block, patch)
-        return
-    block.continuation = None
-
-def set_patch(block:Block, patch:Patch):
-    block.continuation = patch.continuation
-    block.chat_data.extend(patch.chats)
-    block.last = patch.seektime
-    block.seektime = patch.seektime
-
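For reference, a minimal sketch of the fill()/set_patch() contract defined in the removed module above (assumes the Block and Patch definitions deleted in this diff, not code from the commit): a patch is applied only while its last offset still lies before the block's end.

    block = Block(first=0, last=0, end=60000, continuation='head', chat_data=[])
    patch = Patch(chats=[], continuation='next', seektime=12.5, first=0, last=12500)
    fill(block, patch)          # patch.last (12500) < block.end (60000) -> applied
    assert block.continuation == 'next' and block.seektime == 12.5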
@@ -1,72 +0,0 @@
-from . import asyncdl
-from . import parser
-from .. videoinfo import VideoInfo
-from ... import config
-from ... exceptions import InvalidVideoIdException
-logger = config.logger(__name__)
-headers=config.headers
-
-class SuperChatMiner:
-    def __init__(self, video_id, duration, div, callback):
-        if not isinstance(div ,int) or div < 1:
-            raise ValueError('div must be positive integer.')
-        elif div > 10:
-            div = 10
-        if not isinstance(duration ,int) or duration < 1:
-            raise ValueError('duration must be positive integer.')
-        self.video_id = video_id
-        self.duration = duration
-        self.div = div
-        self.callback = callback
-        self.blocks = []
-
-    def _ready_blocks(self):
-        blocks = asyncdl.ready_blocks(
-            self.video_id, self.duration, self.div, self.callback)
-        self.blocks = [block for block in blocks if block is not None]
-        return self
-
-    def _set_block_end(self):
-        for i in range(len(self.blocks)-1):
-            self.blocks[i].end = self.blocks[i+1].first
-        self.blocks[-1].end = self.duration
-        self.blocks[-1].is_last =True
-        return self
-
-    def _download_blocks(self):
-        asyncdl.fetch_patch(self.callback, self.blocks, self.video_id)
-        return self
-
-    def _combine(self):
-        ret = []
-        for block in self.blocks:
-            ret.extend(block.chat_data)
-        return ret
-
-    def extract(self):
-        return (
-            self._ready_blocks()
-            ._set_block_end()
-            ._download_blocks()
-            ._combine()
-        )
-
-def extract(video_id, div = 1, callback = None, processor = None):
-    duration = 0
-    try:
-        duration = VideoInfo(video_id).get_duration()
-    except InvalidVideoIdException:
-        raise
-    if duration == 0:
-        print("video is live.")
-        return []
-    data = SuperChatMiner(video_id, duration, div, callback).extract()
-    if processor is None:
-        return data
-    return processor.process(
-        [{'video_id':None,'timeout':1,'chatdata' : (action
-            for action in data)}]
-    )
-
-def cancel():
-    asyncdl.cancel()
-
@@ -1,45 +0,0 @@
-from . import parser
-from . block import Block
-from . patch import Patch, fill
-from ... paramgen import arcparam
-INTERVAL = 1
-class ExtractWorker:
-    """
-    ExtractWorker associates a download session with a block.
-
-    When the worker finishes fetching, the block
-    being fetched is splitted and assigned the free worker.
-
-    Parameter
-    ----------
-    fetch : func :
-        extract function of asyncdl
-
-    block : Block :
-        Block object that includes chat_data
-
-    blocks : list :
-        List of Block(s)
-
-    video_id : str :
-
-    parent_block : Block :
-        the block from which current block is splitted
-    """
-    __slots__ = ['block', 'fetch', 'blocks', 'video_id', 'parent_block']
-    def __init__(self, fetch, block, blocks, video_id ):
-        self.block:Block = block
-        self.fetch = fetch
-        self.blocks:list = blocks
-        self.video_id:str = video_id
-        self.parent_block:Block = None
-
-    async def run(self, session):
-        while self.block.continuation:
-            patch = await self.fetch(
-                self.block.seektime, session)
-            fill(self.block, patch)
-            self.block.seektime += INTERVAL
-        self.block.done = True
-
-
@@ -1,12 +1,13 @@
 import json
 import re
-import requests
+import httpx
 from .. import config
-from ..exceptions import InvalidVideoIdException
+from ..exceptions import InvalidVideoIdException, PatternUnmatchError
+from ..util.extract_video_id import extract_video_id

 headers = config.headers

-pattern = re.compile(r"yt\.setConfig\({'PLAYER_CONFIG': ({.*})}\);")
+pattern = re.compile(r"'PLAYER_CONFIG': ({.*}}})")

 item_channel_id = [
     "videoDetails",
@@ -78,19 +79,22 @@ class VideoInfo:
     '''

     def __init__(self, video_id):
-        self.video_id = video_id
-        text = self._get_page_text(video_id)
+        self.video_id = extract_video_id(video_id)
+        text = self._get_page_text(self.video_id)
         self._parse(text)

     def _get_page_text(self, video_id):
         url = f"https://www.youtube.com/embed/{video_id}"
-        resp = requests.get(url, headers=headers)
+        resp = httpx.get(url, headers=headers)
         resp.raise_for_status()
         return resp.text

     def _parse(self, text):
         result = re.search(pattern, text)
-        res = json.loads(result.group(1))
+        if result is None:
+            raise PatternUnmatchError(text)
+        decoder = json.JSONDecoder()
+        res = decoder.raw_decode(result.group(1)[:-1])[0]
         response = self._get_item(res, item_response)
         if response is None:
             self._check_video_is_private(res.get("args"))
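A note on the new parsing path above (my reading of the change, not text from the commit): json.JSONDecoder.raw_decode stops at the end of the first complete JSON value and ignores whatever follows, which is why the code can hand it a trimmed slice of the matched 'PLAYER_CONFIG' text instead of a perfectly delimited document. A minimal stdlib sketch:

    import json

    decoder = json.JSONDecoder()
    # raw_decode returns (obj, end_index) and tolerates trailing text.
    obj, end = decoder.raw_decode('{"a": 1}});rest of the page')
    assert obj == {"a": 1} and end == 8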
@@ -1,11 +1,11 @@
-import requests
+import httpx
 import json
 import datetime
 from .. import config


 def extract(url):
-    _session = requests.Session()
+    _session = httpx.Client(http2=True)
     html = _session.get(url, headers=config.headers)
     with open(str(datetime.datetime.now().strftime('%Y-%m-%d %H-%M-%S')
             ) + 'test.json', mode='w', encoding='utf-8') as f:
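The requests.Session -> httpx.Client(http2=True) swap above relies on httpx's optional HTTP/2 support (the httpx[http2] pin appears in the requirements change below). An illustrative sketch of the client, not code from the commit:

    import httpx

    # Without the optional h2 dependency installed, http2=True raises an
    # ImportError, which is why the requirement is declared as httpx[http2].
    client = httpx.Client(http2=True)
    resp = client.get("https://www.youtube.com/", headers={"user-agent": "Mozilla/5.0"})
    print(resp.http_version)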
pytchat/util/extract_video_id.py (new file, 25 lines)
@@ -0,0 +1,25 @@
+import re
+from .. exceptions import InvalidVideoIdException
+
+
+PATTERN = re.compile(r"((?<=(v|V)/)|(?<=be/)|(?<=(\?|\&)v=)|(?<=embed/))([\w-]+)")
+YT_VIDEO_ID_LENGTH = 11
+
+
+def extract_video_id(url_or_id: str) -> str:
+    ret = ''
+    if type(url_or_id) != str:
+        raise TypeError(f"{url_or_id}: URL or VideoID must be str, but {type(url_or_id)} is passed.")
+    if len(url_or_id) == YT_VIDEO_ID_LENGTH:
+        return url_or_id
+    match = re.search(PATTERN, url_or_id)
+    if match is None:
+        raise InvalidVideoIdException(url_or_id)
+    try:
+        ret = match.group(4)
+    except IndexError:
+        raise InvalidVideoIdException(url_or_id)
+
+    if ret is None or len(ret) != YT_VIDEO_ID_LENGTH:
+        raise InvalidVideoIdException(url_or_id)
+    return ret
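Usage sketch for the new helper above (the IDs come from the test patterns added later in this diff): a bare 11-character ID passes through unchanged, and watch/embed/youtu.be style URLs are reduced to the ID.

    from pytchat.util.extract_video_id import extract_video_id

    extract_video_id("ABC_EFG_IJK")                                  # -> "ABC_EFG_IJK"
    extract_video_id("https://www.youtube.com/watch?v=ABC_EFG_IJK")  # -> "ABC_EFG_IJK"
    extract_video_id("youtu.be/ABC_EFG_IJK")                         # -> "ABC_EFG_IJK"
    extract_video_id("short_id")    # raises InvalidVideoIdException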
@@ -1,5 +1,4 @@
-aiohttp
+httpx[http2]==0.14.1
-protobuf
+protobuf==3.13.0
 pytz
-requests
 urllib3
@@ -1,5 +1,4 @@
-aioresponses
 mock
 mocker
 pytest
-pytest-mock
+pytest_httpx
@@ -1,5 +1,5 @@
 import json
-import requests
+import httpx
 import pytchat.config as config
 from pytchat.paramgen import arcparam
 from pytchat.parser.live import Parser
@@ -18,14 +18,15 @@ def test_arcparam_1(mocker):
 def test_arcparam_2(mocker):
     param = arcparam.getparam("SsjCnHOk-Sk", seektime=100)
     url = f"https://www.youtube.com/live_chat_replay/get_live_chat_replay?continuation={param}&pbj=1"
-    resp = requests.Session().get(url, headers=config.headers)
+    resp = httpx.Client(http2=True).get(url, headers=config.headers)
     jsn = json.loads(resp.text)
     parser = Parser(is_replay=True)
     contents = parser.get_contents(jsn)
-    _ , chatdata = parser.parse(contents)
+    _, chatdata = parser.parse(contents)
     test_id = chatdata[0]["addChatItemAction"]["item"]["liveChatTextMessageRenderer"]["id"]
     assert test_id == "CjoKGkNMYXBzZTdudHVVQ0Zjc0IxZ0FkTnFnQjVREhxDSnlBNHV2bnR1VUNGV0dnd2dvZDd3NE5aZy0w"


 def test_arcparam_3(mocker):
     param = arcparam.getparam("01234567890")
     assert param == "op2w0wQmGhxDZzhLRFFvTE1ERXlNelExTmpjNE9UQWdBUT09SARgAXICCAE%3D"
@@ -1,41 +0,0 @@
-from pytchat.tool.mining import parser
-import pytchat.config as config
-import requests
-import json
-from pytchat.paramgen import arcparam_mining as arcparam
-
-
-def test_arcparam_e(mocker):
-    try:
-        arcparam.getparam("01234567890", -1)
-        assert False
-    except ValueError:
-        assert True
-
-
-def test_arcparam_0(mocker):
-    param = arcparam.getparam("01234567890", 0)
-
-    assert param == "op2w0wQsGiBDZzhhRFFvTE1ERXlNelExTmpjNE9UQWdBUSUzRCUzREABYARyAggBeAE%3D"
-
-
-def test_arcparam_1(mocker):
-    param = arcparam.getparam("01234567890", seektime=100000)
-    print(param)
-    assert param == "op2w0wQzGiBDZzhhRFFvTE1ERXlNelExTmpjNE9UQWdBUSUzRCUzREABWgUQgMLXL2AEcgIIAXgB"
-
-
-def test_arcparam_2(mocker):
-    param = arcparam.getparam("PZz9NB0-Z64", 1)
-    url = f"https://www.youtube.com/live_chat_replay?continuation={param}&playerOffsetMs=1000&pbj=1"
-    resp = requests.Session().get(url, headers=config.headers)
-    jsn = json.loads(resp.text)
-    _, chatdata = parser.parse(jsn[1])
-    test_id = chatdata[0]["addChatItemAction"]["item"]["liveChatPaidMessageRenderer"]["id"]
-    print(test_id)
-    assert test_id == "ChwKGkNKSGE0YnFJeWVBQ0ZWcUF3Z0VkdGIwRm9R"
-
-
-def test_arcparam_3(mocker):
-    param = arcparam.getparam("01234567890")
-    assert param == "op2w0wQsGiBDZzhhRFFvTE1ERXlNelExTmpjNE9UQWdBUSUzRCUzREABYARyAggBeAE%3D"
tests/test_default_processor.py (new file, 228 lines)
@@ -0,0 +1,228 @@
+import json
+from pytchat.parser.live import Parser
+from pytchat.processors.default.processor import DefaultProcessor
+
+
+def test_textmessage(mocker):
+    '''text message'''
+    processor = DefaultProcessor()
+    parser = Parser(is_replay=False)
+    _json = _open_file("tests/testdata/default/textmessage.json")
+
+    _, chatdata = parser.parse(parser.get_contents(json.loads(_json)))
+    data = {
+        "video_id": "",
+        "timeout": 7,
+        "chatdata": chatdata
+    }
+
+    ret = processor.process([data]).items[0]
+    assert ret.chattype == "textMessage"
+    assert ret.id == "dummy_id"
+    assert ret.message == "dummy_message"
+    assert ret.timestamp == 1570678496000
+    assert ret.datetime == "2019-10-10 12:34:56"
+    assert ret.author.name == "author_name"
+    assert ret.author.channelId == "author_channel_id"
+    assert ret.author.channelUrl == "http://www.youtube.com/channel/author_channel_id"
+    assert ret.author.imageUrl == "https://yt3.ggpht.com/------------/AAAAAAAAAAA/AAAAAAAAAAA/xxxxxxxxxxxx/s64-x-x-xx-xx-xx-c0xffffff/photo.jpg"
+    assert ret.author.badgeUrl == ""
+    assert ret.author.isVerified is False
+    assert ret.author.isChatOwner is False
+    assert ret.author.isChatSponsor is False
+    assert ret.author.isChatModerator is False
+
+
+def test_textmessage_replay_member(mocker):
+    '''text message replay member'''
+    processor = DefaultProcessor()
+    parser = Parser(is_replay=True)
+    _json = _open_file("tests/testdata/default/replay_member_text.json")
+
+    _, chatdata = parser.parse(parser.get_contents(json.loads(_json)))
+    data = {
+        "video_id": "",
+        "timeout": 7,
+        "chatdata": chatdata
+    }
+
+    ret = processor.process([data]).items[0]
+    assert ret.chattype == "textMessage"
+    assert ret.type == "textMessage"
+    assert ret.id == "dummy_id"
+    assert ret.message == "dummy_message"
+    assert ret.messageEx == ["dummy_message"]
+    assert ret.timestamp == 1570678496000
+    assert ret.datetime == "2019-10-10 12:34:56"
+    assert ret.elapsedTime == "1:23:45"
+    assert ret.author.name == "author_name"
+    assert ret.author.channelId == "author_channel_id"
+    assert ret.author.channelUrl == "http://www.youtube.com/channel/author_channel_id"
+    assert ret.author.imageUrl == "https://yt3.ggpht.com/------------/AAAAAAAAAAA/AAAAAAAAAAA/xxxxxxxxxxxx/s64-x-x-xx-xx-xx-c0xffffff/photo.jpg"
+    assert ret.author.badgeUrl == "https://yt3.ggpht.com/X=s16-c-k"
+    assert ret.author.isVerified is False
+    assert ret.author.isChatOwner is False
+    assert ret.author.isChatSponsor is True
+    assert ret.author.isChatModerator is False
+
+
+def test_superchat(mocker):
+    '''superchat'''
+    processor = DefaultProcessor()
+    parser = Parser(is_replay=False)
+    _json = _open_file("tests/testdata/default/superchat.json")
+
+    _, chatdata = parser.parse(parser.get_contents(json.loads(_json)))
+    data = {
+        "video_id": "",
+        "timeout": 7,
+        "chatdata": chatdata
+    }
+
+    ret = processor.process([data]).items[0]
+    print(json.dumps(chatdata, ensure_ascii=False))
+    assert ret.chattype == "superChat"
+    assert ret.type == "superChat"
+    assert ret.id == "dummy_id"
+    assert ret.message == "dummy_message"
+    assert ret.messageEx == ["dummy_message"]
+    assert ret.timestamp == 1570678496000
+    assert ret.datetime == "2019-10-10 12:34:56"
+    assert ret.elapsedTime == ""
+    assert ret.amountValue == 800
+    assert ret.amountString == "¥800"
+    assert ret.currency == "JPY"
+    assert ret.bgColor == 4280150454
+    assert ret.author.name == "author_name"
+    assert ret.author.channelId == "author_channel_id"
+    assert ret.author.channelUrl == "http://www.youtube.com/channel/author_channel_id"
+    assert ret.author.imageUrl == "https://yt3.ggpht.com/------------/AAAAAAAAAAA/AAAAAAAAAAA/xxxxxxxxxxxx/s64-x-x-xx-xx-xx-c0xffffff/photo.jpg"
+    assert ret.author.badgeUrl == ""
+    assert ret.author.isVerified is False
+    assert ret.author.isChatOwner is False
+    assert ret.author.isChatSponsor is False
+    assert ret.author.isChatModerator is False
+    assert ret.colors.headerBackgroundColor == 4278239141
+    assert ret.colors.headerTextColor == 4278190080
+    assert ret.colors.bodyBackgroundColor == 4280150454
+    assert ret.colors.bodyTextColor == 4278190080
+    assert ret.colors.authorNameTextColor == 2315255808
+    assert ret.colors.timestampColor == 2147483648
+
+
+def test_supersticker(mocker):
+    '''supersticker'''
+    processor = DefaultProcessor()
+    parser = Parser(is_replay=False)
+    _json = _open_file("tests/testdata/default/supersticker.json")
+
+    _, chatdata = parser.parse(parser.get_contents(json.loads(_json)))
+    data = {
+        "video_id": "",
+        "timeout": 7,
+        "chatdata": chatdata
+    }
+
+    ret = processor.process([data]).items[0]
+    print(json.dumps(chatdata, ensure_ascii=False))
+    assert ret.chattype == "superSticker"
+    assert ret.type == "superSticker"
+    assert ret.id == "dummy_id"
+    assert ret.message == ""
+    assert ret.messageEx == []
+    assert ret.timestamp == 1570678496000
+    assert ret.datetime == "2019-10-10 12:34:56"
+    assert ret.elapsedTime == ""
+    assert ret.amountValue == 200
+    assert ret.amountString == "¥200"
+    assert ret.currency == "JPY"
+    assert ret.bgColor == 4278237396
+    assert ret.sticker == "https://lh3.googleusercontent.com/param_s=s72-rp"
+    assert ret.author.name == "author_name"
+    assert ret.author.channelId == "author_channel_id"
+    assert ret.author.channelUrl == "http://www.youtube.com/channel/author_channel_id"
+    assert ret.author.imageUrl == "https://yt3.ggpht.com/------------/AAAAAAAAAAA/AAAAAAAAAAA/xxxxxxxxxxxx/s64-x-x-xx-xx-xx-c0xffffff/photo.jpg"
+    assert ret.author.badgeUrl == ""
+    assert ret.author.isVerified is False
+    assert ret.author.isChatOwner is False
+    assert ret.author.isChatSponsor is False
+    assert ret.author.isChatModerator is False
+    assert ret.colors.backgroundColor == 4278237396
+    assert ret.colors.moneyChipBackgroundColor == 4278248959
+    assert ret.colors.moneyChipTextColor == 4278190080
+    assert ret.colors.authorNameTextColor == 3003121664
+
+
+def test_sponsor(mocker):
+    '''sponsor(membership)'''
+    processor = DefaultProcessor()
+    parser = Parser(is_replay=False)
+    _json = _open_file("tests/testdata/default/newSponsor_current.json")
+
+    _, chatdata = parser.parse(parser.get_contents(json.loads(_json)))
+    data = {
+        "video_id": "",
+        "timeout": 7,
+        "chatdata": chatdata
+    }
+
+    ret = processor.process([data]).items[0]
+    print(json.dumps(chatdata, ensure_ascii=False))
+    assert ret.chattype == "newSponsor"
+    assert ret.type == "newSponsor"
+    assert ret.id == "dummy_id"
+    assert ret.message == "新規メンバー"
+    assert ret.messageEx == ["新規メンバー"]
+    assert ret.timestamp == 1570678496000
+    assert ret.datetime == "2019-10-10 12:34:56"
+    assert ret.elapsedTime == ""
+    assert ret.bgColor == 0
+    assert ret.author.name == "author_name"
+    assert ret.author.channelId == "author_channel_id"
+    assert ret.author.channelUrl == "http://www.youtube.com/channel/author_channel_id"
+    assert ret.author.imageUrl == "https://yt3.ggpht.com/------------/AAAAAAAAAAA/AAAAAAAAAAA/xxxxxxxxxxxx/s64-x-x-xx-xx-xx-c0xffffff/photo.jpg"
+    assert ret.author.badgeUrl == "https://yt3.ggpht.com/X=s32-c-k"
+    assert ret.author.isVerified is False
+    assert ret.author.isChatOwner is False
+    assert ret.author.isChatSponsor is True
+    assert ret.author.isChatModerator is False
+
+
+def test_sponsor_legacy(mocker):
+    '''lagacy sponsor(membership)'''
+    processor = DefaultProcessor()
+    parser = Parser(is_replay=False)
+    _json = _open_file("tests/testdata/default/newSponsor_lagacy.json")
+
+    _, chatdata = parser.parse(parser.get_contents(json.loads(_json)))
+    data = {
+        "video_id": "",
+        "timeout": 7,
+        "chatdata": chatdata
+    }
+
+    ret = processor.process([data]).items[0]
+    print(json.dumps(chatdata, ensure_ascii=False))
+    assert ret.chattype == "newSponsor"
+    assert ret.type == "newSponsor"
+    assert ret.id == "dummy_id"
+    assert ret.message == "新規メンバー / ようこそ、author_name!"
+    assert ret.messageEx == ["新規メンバー / ようこそ、author_name!"]
+    assert ret.timestamp == 1570678496000
+    assert ret.datetime == "2019-10-10 12:34:56"
+    assert ret.elapsedTime == ""
+    assert ret.bgColor == 0
+    assert ret.author.name == "author_name"
+    assert ret.author.channelId == "author_channel_id"
+    assert ret.author.channelUrl == "http://www.youtube.com/channel/author_channel_id"
+    assert ret.author.imageUrl == "https://yt3.ggpht.com/------------/AAAAAAAAAAA/AAAAAAAAAAA/xxxxxxxxxxxx/s64-x-x-xx-xx-xx-c0xffffff/photo.jpg"
+    assert ret.author.badgeUrl == ""
+    assert ret.author.isVerified is False
+    assert ret.author.isChatOwner is False
+    assert ret.author.isChatSponsor is True
+    assert ret.author.isChatModerator is False
+
+
+def _open_file(path):
+    with open(path, mode='r', encoding='utf-8') as f:
+        return f.read()
@@ -1,77 +0,0 @@
-import aiohttp
-import asyncio
-import json
-from pytchat.tool.extract import parser
-import sys
-import time
-from aioresponses import aioresponses
-from concurrent.futures import CancelledError
-from pytchat.tool.extract import asyncdl
-
-def _open_file(path):
-    with open(path,mode ='r',encoding = 'utf-8') as f:
-        return f.read()
-
-
-def test_asyncdl_split():
-
-    ret = asyncdl._split(0,1000,1)
-    assert ret == [0]
-
-    ret = asyncdl._split(1000,1000,10)
-    assert ret == [1000]
-
-    ret = asyncdl._split(0,1000,5)
-    assert ret == [0,200,400,600,800]
-
-    ret = asyncdl._split(10.5, 700.3, 5)
-    assert ret == [10, 148, 286, 424, 562]
-
-
-    ret = asyncdl._split(0,500,5)
-    assert ret == [0,125,250,375]
-
-    ret = asyncdl._split(0,500,500)
-    assert ret == [0,125,250,375]
-
-    ret = asyncdl._split(-1,1000,5)
-    assert ret == [-1, 199, 399, 599, 799]
-
-    """invalid argument order"""
-    try:
-        ret = asyncdl._split(500,0,5)
-        assert False
-    except ValueError:
-        assert True
-
-    """invalid count"""
-    try:
-        ret = asyncdl._split(0,500,-1)
-        assert False
-    except ValueError:
-        assert True
-
-    try:
-        ret = asyncdl._split(0,500,0)
-        assert False
-    except ValueError:
-        assert True
-
-    """invalid argument type"""
-    try:
-        ret = asyncdl._split(0,5000,5.2)
-        assert False
-    except ValueError:
-        assert True
-
-    try:
-        ret = asyncdl._split(0,5000,"test")
-        assert False
-    except ValueError:
-        assert True
-
-    try:
-        ret = asyncdl._split([0,1],5000,5)
-        assert False
-    except ValueError:
-        assert True
@@ -1,60 +1,66 @@
-import aiohttp
-import asyncio
 import json
-import os, sys
-import time
 from pytchat.tool.extract import duplcheck
 from pytchat.tool.extract import parser
 from pytchat.tool.extract.block import Block
 from pytchat.tool.extract.duplcheck import _dump
-def _open_file(path):
-    with open(path,mode ='r',encoding = 'utf-8') as f:
-        return f.read()


+def _open_file(path):
+    with open(path, mode='r', encoding='utf-8') as f:
+        return f.read()


 def test_overlap():
     """
     test overlap data
     operation : [0] [2] [3] [4] -> last :align to end
                 [1] , [5] -> no change

     """

     def load_chatdata(filename):
         return parser.parse(
-            json.loads(_open_file("tests/testdata/extract_duplcheck/overlap/"+filename))
+            json.loads(_open_file(
+                "tests/testdata/extract_duplcheck/overlap/" + filename))
         )[1]

     blocks = (
-        Block(first = 0, last= 12771, end= 9890,chat_data = load_chatdata("dp0-0.json")),
+        Block(first=0, last=12771, end=9890,
+              chat_data=load_chatdata("dp0-0.json")),
-        Block(first = 9890, last= 15800, end= 20244,chat_data = load_chatdata("dp0-1.json")),
+        Block(first=9890, last=15800, end=20244,
+              chat_data=load_chatdata("dp0-1.json")),
-        Block(first = 20244,last= 45146, end= 32476,chat_data = load_chatdata("dp0-2.json")),
+        Block(first=20244, last=45146, end=32476,
+              chat_data=load_chatdata("dp0-2.json")),
-        Block(first = 32476,last= 50520, end= 41380,chat_data = load_chatdata("dp0-3.json")),
+        Block(first=32476, last=50520, end=41380,
+              chat_data=load_chatdata("dp0-3.json")),
-        Block(first = 41380,last= 62875, end= 52568,chat_data = load_chatdata("dp0-4.json")),
+        Block(first=41380, last=62875, end=52568,
+              chat_data=load_chatdata("dp0-4.json")),
-        Block(first = 52568,last= 62875, end= 54000,chat_data = load_chatdata("dp0-5.json"),is_last=True)
+        Block(first=52568, last=62875, end=54000,
+              chat_data=load_chatdata("dp0-5.json"), is_last=True)
     )
     result = duplcheck.remove_overlap(blocks)
-    #dp0-0.json has item offset time is 9890 (equals block[0].end = block[1].first),
-    #but must be aligne to the most close and smaller value:9779.
+    # dp0-0.json has item offset time is 9890 (equals block[0].end = block[1].first),
+    # but must be aligne to the most close and smaller value:9779.
     assert result[0].last == 9779

     assert result[1].last == 15800

     assert result[2].last == 32196

     assert result[3].last == 41116

     assert result[4].last == 52384

-    #the last block must be always added to result.
+    # the last block must be always added to result.
     assert result[5].last == 62875


 def test_duplicate_head():

     def load_chatdata(filename):
         return parser.parse(
-            json.loads(_open_file("tests/testdata/extract_duplcheck/head/"+filename))
+            json.loads(_open_file(
+                "tests/testdata/extract_duplcheck/head/" + filename))
         )[1]

     """
@@ -69,25 +75,26 @@ def test_duplicate_head():
     result : [2] , [4] , [5]
     """

-    #chat data offsets are ignored.
+    # chat data offsets are ignored.
     blocks = (
-        Block(first = 0, last = 2500, chat_data = load_chatdata("dp0-0.json")),
+        Block(first=0, last=2500, chat_data=load_chatdata("dp0-0.json")),
-        Block(first = 0, last =38771, chat_data = load_chatdata("dp0-1.json")),
+        Block(first=0, last=38771, chat_data=load_chatdata("dp0-1.json")),
-        Block(first = 0, last =45146, chat_data = load_chatdata("dp0-2.json")),
+        Block(first=0, last=45146, chat_data=load_chatdata("dp0-2.json")),
-        Block(first = 20244, last =60520, chat_data = load_chatdata("dp0-3.json")),
+        Block(first=20244, last=60520, chat_data=load_chatdata("dp0-3.json")),
-        Block(first = 20244, last =62875, chat_data = load_chatdata("dp0-4.json")),
+        Block(first=20244, last=62875, chat_data=load_chatdata("dp0-4.json")),
-        Block(first = 52568, last =62875, chat_data = load_chatdata("dp0-5.json"))
+        Block(first=52568, last=62875, chat_data=load_chatdata("dp0-5.json"))
     )
     _dump(blocks)
     result = duplcheck.remove_duplicate_head(blocks)

     assert len(result) == 3
     assert result[0].first == blocks[2].first
     assert result[0].last == blocks[2].last
     assert result[1].first == blocks[4].first
     assert result[1].last == blocks[4].last
     assert result[2].first == blocks[5].first
     assert result[2].last == blocks[5].last


 def test_duplicate_tail():
     """
@@ -103,26 +110,25 @@ def test_duplicate_tail():
     """
     def load_chatdata(filename):
         return parser.parse(
-            json.loads(_open_file("tests/testdata/extract_duplcheck/head/"+filename))
+            json.loads(_open_file(
+                "tests/testdata/extract_duplcheck/head/" + filename))
         )[1]
-    #chat data offsets are ignored.
+    # chat data offsets are ignored.
     blocks = (
-        Block(first = 0,last = 2500, chat_data=load_chatdata("dp0-0.json")),
+        Block(first=0, last=2500, chat_data=load_chatdata("dp0-0.json")),
-        Block(first = 1500,last = 2500, chat_data=load_chatdata("dp0-1.json")),
+        Block(first=1500, last=2500, chat_data=load_chatdata("dp0-1.json")),
-        Block(first = 10000,last = 45146, chat_data=load_chatdata("dp0-2.json")),
+        Block(first=10000, last=45146, chat_data=load_chatdata("dp0-2.json")),
-        Block(first = 20244,last = 45146, chat_data=load_chatdata("dp0-3.json")),
+        Block(first=20244, last=45146, chat_data=load_chatdata("dp0-3.json")),
-        Block(first = 20244,last = 62875, chat_data=load_chatdata("dp0-4.json")),
+        Block(first=20244, last=62875, chat_data=load_chatdata("dp0-4.json")),
-        Block(first = 52568,last = 62875, chat_data=load_chatdata("dp0-5.json"))
+        Block(first=52568, last=62875, chat_data=load_chatdata("dp0-5.json"))
     )

     result = duplcheck.remove_duplicate_tail(blocks)
     _dump(result)
     assert len(result) == 3
     assert result[0].first == blocks[0].first
     assert result[0].last == blocks[0].last
     assert result[1].first == blocks[2].first
     assert result[1].last == blocks[2].last
     assert result[2].first == blocks[4].first
     assert result[2].last == blocks[4].last


@@ -1,23 +1,19 @@
-import aiohttp
-import asyncio
 import json
-import os, sys
-import time
-from aioresponses import aioresponses
-from pytchat.tool.extract import duplcheck
 from pytchat.tool.extract import parser
 from pytchat.tool.extract.block import Block
-from pytchat.tool.extract.patch import Patch, fill, split, set_patch
+from pytchat.tool.extract.patch import Patch, split
-from pytchat.tool.extract.duplcheck import _dump

 def _open_file(path):
-    with open(path,mode ='r',encoding = 'utf-8') as f:
+    with open(path, mode='r', encoding='utf-8') as f:
         return f.read()


 def load_chatdata(filename):
     return parser.parse(
-        json.loads(_open_file("tests/testdata/fetch_patch/"+filename))
+        json.loads(_open_file("tests/testdata/fetch_patch/" + filename))
     )[1]


 def test_split_0():
@@ -61,20 +57,23 @@ def test_split_0():
     @fetched patch
       |-- patch --|
     """
-    parent = Block(first=0, last=4000, end=60000, continuation='parent', during_split=True)
+    parent = Block(first=0, last=4000, end=60000,
+                   continuation='parent', during_split=True)
-    child = Block(first=0, last=0, end=60000, continuation='mean', during_split=True)
+    child = Block(first=0, last=0, end=60000,
+                  continuation='mean', during_split=True)
     patch = Patch(chats=load_chatdata('pt0-5.json'),
                   first=32500, last=34000, continuation='patch')

-    split(parent,child,patch)
+    split(parent, child, patch)

     assert child.continuation == 'patch'
     assert parent.last < child.first
     assert parent.end == child.first
     assert child.first < child.last
     assert child.last < child.end
-    assert parent.during_split == False
+    assert parent.during_split is False
-    assert child.during_split == False
+    assert child.during_split is False


 def test_split_1():
     """patch.first <= parent_block.last
@@ -119,14 +118,15 @@ def test_split_1():
     child = Block(first=0, last=0, end=60000, continuation='mean', during_split=True)
     patch = Patch(chats=load_chatdata('pt0-5.json'),
                   first=32500, last=34000, continuation='patch')

-    split(parent,child,patch)

-    assert parent.last == 33000 #no change
+    split(parent, child, patch)
-    assert parent.end == 60000 #no change
+    assert parent.last == 33000  # no change
+    assert parent.end == 60000  # no change
     assert child.continuation is None
-    assert parent.during_split == False
+    assert parent.during_split is False
-    assert child.during_split == True #exclude during_split sequence
+    assert child.during_split is True  # exclude during_split sequence


 def test_split_2():
     """child_block.end < patch.last:
@@ -174,7 +174,7 @@ def test_split_2():
     patch = Patch(chats=load_chatdata('pt0-5.json'),
                   first=32500, last=34000, continuation='patch')

-    split(parent,child,patch)
+    split(parent, child, patch)

     assert child.continuation is None
     assert parent.last < child.first
@@ -182,8 +182,9 @@ def test_split_2():
     assert child.first < child.last
     assert child.last < child.end
     assert child.continuation is None
-    assert parent.during_split == False
+    assert parent.during_split is False
-    assert child.during_split == False
+    assert child.during_split is False


 def test_split_none():
     """patch.last <= parent_block.last
@@ -193,7 +194,7 @@ def test_split_none():
     and parent.block.last exceeds patch.first.

     In this case, fetched patch is all discarded,
     and worker searches other processing block again.

     ~~~~~~ before ~~~~~~

@@ -229,10 +230,10 @@ def test_split_none():
     patch = Patch(chats=load_chatdata('pt0-5.json'),
                   first=32500, last=34000, continuation='patch')

-    split(parent,child,patch)
+    split(parent, child, patch)

-    assert parent.last == 40000 #no change
+    assert parent.last == 40000  # no change
-    assert parent.end == 60000 #no change
+    assert parent.end == 60000  # no change
     assert child.continuation is None
-    assert parent.during_split == False
+    assert parent.during_split is False
-    assert child.during_split == True #exclude during_split sequence
+    assert child.during_split is True  # exclude during_split sequence
tests/test_extract_video_id.py (new file, 55 lines)
@@ -0,0 +1,55 @@
+from pytchat.util.extract_video_id import extract_video_id
+from pytchat.exceptions import InvalidVideoIdException
+
+VALID_TEST_PATTERNS = (
+    ("ABC_EFG_IJK", "ABC_EFG_IJK"),
+    ("vid_test_be", "vid_test_be"),
+    ("https://www.youtube.com/watch?v=123_456_789", "123_456_789"),
+    ("https://www.youtube.com/watch?v=123_456_789&t=123s", "123_456_789"),
+    ("www.youtube.com/watch?v=123_456_789", "123_456_789"),
+    ("watch?v=123_456_789", "123_456_789"),
+    ("youtube.com/watch?v=123_456_789", "123_456_789"),
+    ("http://youtu.be/ABC_EFG_IJK", "ABC_EFG_IJK"),
+    ("youtu.be/ABC_EFG_IJK", "ABC_EFG_IJK"),
+    ("https://www.youtube.com/watch?v=ABC_EFG_IJK&list=XYZ_ABC_12345&start_radio=1&t=1", "ABC_EFG_IJK"),
+    ("https://www.youtube.com/embed/ABC_EFG_IJK", "ABC_EFG_IJK"),
+    ("www.youtube.com/embed/ABC_EFG_IJK", "ABC_EFG_IJK"),
+    ("youtube.com/embed/ABC_EFG_IJK", "ABC_EFG_IJK")
+)
+
+INVALID_TEST_PATTERNS = (
+    ("", ""),
+    ("0123456789", "0123456789"),  # less than 11 letters id
+    ("more_than_11_letter_string", "more_than_11_letter_string"),
+    ("https://www.youtube.com/watch?v=more_than_11_letter_string", "more_than_11_letter_string"),
+    ("https://www.youtube.com/channel/123_456_789", "123_456_789"),
+)
+
+TYPEERROR_TEST_PATTERNS = (
+    (100, 100),  # not string
+    (["123_456_789"], "123_456_789"),  # not string
+)
+
+
+def test_extract_valid_pattern():
+    for pattern in VALID_TEST_PATTERNS:
+        ret = extract_video_id(pattern[0])
+        assert ret == pattern[1]
+
+
+def test_extract_invalid_pattern():
+    for pattern in INVALID_TEST_PATTERNS:
+        try:
+            extract_video_id(pattern[0])
+            assert False
+        except InvalidVideoIdException:
+            assert True
+
+
+def test_extract_typeerror_pattern():
+    for pattern in TYPEERROR_TEST_PATTERNS:
+        try:
+            extract_video_id(pattern[0])
+            assert False
+        except TypeError:
+            assert True
@@ -1,5 +1,8 @@
+import asyncio
 import json
-from aioresponses import aioresponses
+from pytest_httpx import HTTPXMock
+from concurrent.futures import CancelledError
+from pytchat.core_multithread.livechat import LiveChat
 from pytchat.core_async.livechat import LiveChatAsync
 from pytchat.exceptions import ResponseContextError

@@ -9,34 +12,37 @@ def _open_file(path):
     return f.read()


-@aioresponses()
+def add_response_file(httpx_mock: HTTPXMock, jsonfile_path: str):
-def test_Async(*mock):
+    testdata = json.loads(_open_file(jsonfile_path))
-    vid = ''
+    httpx_mock.add_response(json=testdata)
-    _text = _open_file('tests/testdata/paramgen_firstread.json')
-    _text = json.loads(_text)
-    mock[0].get(
+def test_async(httpx_mock: HTTPXMock):
-        f"https://www.youtube.com/live_chat?v={vid}&is_popout=1", status=200, body=_text)
+    add_response_file(httpx_mock, 'tests/testdata/paramgen_firstread.json')

+    async def test_loop():
+        try:
+            chat = LiveChatAsync(video_id='__test_id__')
+            _ = await chat.get()
+            assert chat.is_alive()
+            chat.terminate()
+            assert not chat.is_alive()
+        except ResponseContextError:
+            assert False
+    loop = asyncio.get_event_loop()
     try:
-        chat = LiveChatAsync(video_id='')
+        loop.run_until_complete(test_loop())
+    except CancelledError:
+        assert True


+def test_multithread(httpx_mock: HTTPXMock):
+    add_response_file(httpx_mock, 'tests/testdata/paramgen_firstread.json')
+    try:
+        chat = LiveChat(video_id='__test_id__')
+        _ = chat.get()
         assert chat.is_alive()
         chat.terminate()
         assert not chat.is_alive()
     except ResponseContextError:
-        assert not chat.is_alive()
+        assert False


-def test_MultiThread(mocker):
-    _text = _open_file('tests/testdata/paramgen_firstread.json')
-    _text = json.loads(_text)
-    responseMock = mocker.Mock()
-    responseMock.status_code = 200
-    responseMock.text = _text
-    mocker.patch('requests.Session.get').return_value = responseMock
-    try:
-        chat = LiveChatAsync(video_id='')
-        assert chat.is_alive()
-        chat.terminate()
-        assert not chat.is_alive()
-    except ResponseContextError:
-        chat.terminate()
-        assert not chat.is_alive()
@@ -1,6 +1,6 @@
 import asyncio
-import re
+import json
-from aioresponses import aioresponses
+from pytest_httpx import HTTPXMock
 from concurrent.futures import CancelledError
 from pytchat.core_multithread.livechat import LiveChat
 from pytchat.core_async.livechat import LiveChatAsync
@@ -12,18 +12,18 @@ def _open_file(path):
     return f.read()


-@aioresponses()
+def add_response_file(httpx_mock: HTTPXMock, jsonfile_path: str):
-def test_async_live_stream(*mock):
+    testdata = json.loads(_open_file(jsonfile_path))
+    httpx_mock.add_response(json=testdata)

-    async def test_loop(*mock):
-        pattern = re.compile(
+def test_async_live_stream(httpx_mock: HTTPXMock):
-            r'^https://www.youtube.com/live_chat/get_live_chat\?continuation=.*$')
+    add_response_file(httpx_mock, 'tests/testdata/test_stream.json')
-        _text = _open_file('tests/testdata/test_stream.json')
-        mock[0].get(pattern, status=200, body=_text)
+    async def test_loop():
-        chat = LiveChatAsync(video_id='', processor=DummyProcessor())
+        chat = LiveChatAsync(video_id='__test_id__', processor=DummyProcessor())
         chats = await chat.get()
         rawdata = chats[0]["chatdata"]
-        # assert fetching livachat data
         assert list(rawdata[0]["addChatItemAction"]["item"].keys())[
             0] == "liveChatTextMessageRenderer"
         assert list(rawdata[1]["addChatItemAction"]["item"].keys())[
@@ -41,26 +41,17 @@ def test_async_live_stream(*mock):

     loop = asyncio.get_event_loop()
     try:
-        loop.run_until_complete(test_loop(*mock))
+        loop.run_until_complete(test_loop())
     except CancelledError:
         assert True


-@aioresponses()
+def test_async_replay_stream(httpx_mock: HTTPXMock):
-def test_async_replay_stream(*mock):
+    add_response_file(httpx_mock, 'tests/testdata/finished_live.json')
+    add_response_file(httpx_mock, 'tests/testdata/chatreplay.json')

-    async def test_loop(*mock):
+    async def test_loop():
-        pattern_live = re.compile(
+        chat = LiveChatAsync(video_id='__test_id__', processor=DummyProcessor())
-            r'^https://www.youtube.com/live_chat/get_live_chat\?continuation=.*$')
-        pattern_replay = re.compile(
-            r'^https://www.youtube.com/live_chat_replay/get_live_chat_replay\?continuation=.*$')
-        # empty livechat -> switch to fetch replaychat
-        _text_live = _open_file('tests/testdata/finished_live.json')
-        _text_replay = _open_file('tests/testdata/chatreplay.json')
-        mock[0].get(pattern_live, status=200, body=_text_live)
-        mock[0].get(pattern_replay, status=200, body=_text_replay)

-        chat = LiveChatAsync(video_id='', processor=DummyProcessor())
         chats = await chat.get()
         rawdata = chats[0]["chatdata"]
         # assert fetching replaychat data
@@ -71,29 +62,18 @@ def test_async_replay_stream(*mock):

     loop = asyncio.get_event_loop()
     try:
-        loop.run_until_complete(test_loop(*mock))
+        loop.run_until_complete(test_loop())
     except CancelledError:
         assert True


-@aioresponses()
+def test_async_force_replay(httpx_mock: HTTPXMock):
-def test_async_force_replay(*mock):
+    add_response_file(httpx_mock, 'tests/testdata/test_stream.json')
+    add_response_file(httpx_mock, 'tests/testdata/chatreplay.json')

-    async def test_loop(*mock):
+    async def test_loop():
-        pattern_live = re.compile(
-            r'^https://www.youtube.com/live_chat/get_live_chat\?continuation=.*$')
-        pattern_replay = re.compile(
-            r'^https://www.youtube.com/live_chat_replay/get_live_chat_replay\?continuation=.*$')
-        # valid live data, but force_replay = True
-        _text_live = _open_file('tests/testdata/test_stream.json')
-        # valid replay data
-        _text_replay = _open_file('tests/testdata/chatreplay.json')

-        mock[0].get(pattern_live, status=200, body=_text_live)
-        mock[0].get(pattern_replay, status=200, body=_text_replay)
-        # force replay
         chat = LiveChatAsync(
-            video_id='', processor=DummyProcessor(), force_replay=True)
+            video_id='__test_id__', processor=DummyProcessor(), force_replay=True)
         chats = await chat.get()
         rawdata = chats[0]["chatdata"]
         # assert fetching replaychat data
@@ -105,21 +85,14 @@ def test_async_force_replay(*mock):

     loop = asyncio.get_event_loop()
     try:
-        loop.run_until_complete(test_loop(*mock))
+        loop.run_until_complete(test_loop())
     except CancelledError:
         assert True


-def test_multithread_live_stream(mocker):
+def test_multithread_live_stream(httpx_mock: HTTPXMock):
+    add_response_file(httpx_mock, 'tests/testdata/test_stream.json')
-    _text = _open_file('tests/testdata/test_stream.json')
+    chat = LiveChat(video_id='__test_id__', processor=DummyProcessor())
-    responseMock = mocker.Mock()
-    responseMock.status_code = 200
-    responseMock.text = _text
-    mocker.patch(
-        'requests.Session.get').return_value.__enter__.return_value = responseMock

-    chat = LiveChat(video_id='test_id', processor=DummyProcessor())
     chats = chat.get()
     rawdata = chats[0]["chatdata"]
     # assert fetching livachat data
@@ -1,21 +1,18 @@
 from pytchat.parser.live import Parser
 import json
-from aioresponses import aioresponses
 from pytchat.exceptions import NoContents


+parser = Parser(is_replay=False)


 def _open_file(path):
     with open(path, mode='r', encoding='utf-8') as f:
         return f.read()


-parser = Parser(is_replay=False)


-@aioresponses()
 def test_finishedlive(*mock):
     '''配信が終了した動画を正しく処理できるか'''

     _text = _open_file('tests/testdata/finished_live.json')
     _text = json.loads(_text)

@@ -26,10 +23,8 @@ def test_finishedlive(*mock):
     assert True


-@aioresponses()
 def test_parsejson(*mock):
     '''jsonを正常にパースできるか'''

     _text = _open_file('tests/testdata/paramgen_firstread.json')
     _text = json.loads(_text)

@@ -1,35 +1,39 @@
+from json.decoder import JSONDecodeError
from pytchat.tool.videoinfo import VideoInfo
-from pytchat.exceptions import InvalidVideoIdException
+from pytchat.exceptions import InvalidVideoIdException, PatternUnmatchError
-import pytest

def _open_file(path):
-    with open(path,mode ='r',encoding = 'utf-8') as f:
+    with open(path, mode='r', encoding='utf-8') as f:
        return f.read()


def _set_test_data(filepath, mocker):
    _text = _open_file(filepath)
    response_mock = mocker.Mock()
    response_mock.status_code = 200
    response_mock.text = _text
-    mocker.patch('requests.get').return_value = response_mock
+    mocker.patch('httpx.get').return_value = response_mock


def test_archived_page(mocker):
    _set_test_data('tests/testdata/videoinfo/archived_page.txt', mocker)
-    info = VideoInfo('test_id')
+    info = VideoInfo('__test_id__')
    actual_thumbnail_url = 'https://i.ytimg.com/vi/fzI9FNjXQ0o/hqdefault.jpg'
-    assert info.video_id == 'test_id'
+    assert info.video_id == '__test_id__'
    assert info.get_channel_name() == 'GitHub'
    assert info.get_thumbnail() == actual_thumbnail_url
    assert info.get_title() == 'GitHub Arctic Code Vault'
    assert info.get_channel_id() == 'UC7c3Kb6jYCRj4JOHHZTxKsQ'
    assert info.get_duration() == 148


def test_live_page(mocker):
    _set_test_data('tests/testdata/videoinfo/live_page.txt', mocker)
-    info = VideoInfo('test_id')
-    '''live page :duration = 0'''
+    info = VideoInfo('__test_id__')
+    '''live page: duration==0'''
    assert info.get_duration() == 0
-    assert info.video_id == 'test_id'
+    assert info.video_id == '__test_id__'
    assert info.get_channel_name() == 'BGM channel'
    assert info.get_thumbnail() == \
        'https://i.ytimg.com/vi/fEvM-OUbaKs/hqdefault_live.jpg'
@@ -38,25 +42,60 @@ def test_live_page(mocker):
        ' - 24/7 Live Stream - Slow Jazz')
    assert info.get_channel_id() == 'UCQINXHZqCU5i06HzxRkujfg'


def test_invalid_video_id(mocker):
    '''Test case invalid video_id is specified.'''
    _set_test_data(
        'tests/testdata/videoinfo/invalid_video_id_page.txt', mocker)
    try:
-        _ = VideoInfo('test_id')
+        _ = VideoInfo('__test_id__')
        assert False
    except InvalidVideoIdException:
        assert True


def test_no_info(mocker):
    '''Test case the video page has renderer, but no info.'''
    _set_test_data(
        'tests/testdata/videoinfo/no_info_page.txt', mocker)
-    info = VideoInfo('test_id')
-    assert info.video_id == 'test_id'
+    info = VideoInfo('__test_id__')
+    assert info.video_id == '__test_id__'
    assert info.get_channel_name() is None
    assert info.get_thumbnail() is None
    assert info.get_title() is None
    assert info.get_channel_id() is None
    assert info.get_duration() is None


+def test_collapsed_data(mocker):
+    '''Test case the video page's info is collapsed.'''
+    _set_test_data(
+        'tests/testdata/videoinfo/collapsed_page.txt', mocker)
+    try:
+        _ = VideoInfo('__test_id__')
+        assert False
+    except JSONDecodeError:
+        assert True
+
+
+def test_pattern_unmatch(mocker):
+    '''Test case the pattern for extraction is unmatched.'''
+    _set_test_data(
+        'tests/testdata/videoinfo/pattern_unmatch.txt', mocker)
+    try:
+        _ = VideoInfo('__test_id__')
+        assert False
+    except PatternUnmatchError:
+        assert True
+
+
+def test_extradata_handling(mocker):
+    '''Test case the extracted data are JSON lines.'''
+    _set_test_data(
+        'tests/testdata/videoinfo/extradata_page.txt', mocker)
+    try:
+        _ = VideoInfo('__test_id__')
+        assert True
+    except JSONDecodeError as e:
+        print(e.doc)
+        assert False
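For reference, the stubbing pattern used throughout the videoinfo tests above: pytest-mock's mocker fixture replaces httpx.get so that VideoInfo parses a saved watch page instead of making a network call. A self-contained sketch of the same idea (the test name is illustrative; the fixture path and assertions come from the diff):

    from pytchat.tool.videoinfo import VideoInfo

    def test_videoinfo_against_saved_page(mocker):
        # Wrap the saved watch-page text in a fake httpx response object.
        with open('tests/testdata/videoinfo/archived_page.txt', encoding='utf-8') as f:
            fake_response = mocker.Mock(status_code=200, text=f.read())
        # Every httpx.get call made inside VideoInfo now returns the fake response.
        mocker.patch('httpx.get', return_value=fake_response)
        info = VideoInfo('__test_id__')
        assert info.video_id == '__test_id__'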
100 tests/testdata/default/newSponsor_current.json vendored Normal file
@@ -0,0 +1,100 @@
{
  "response": {
    "responseContext": {
      "webResponseContextExtensionData": ""
    },
    "continuationContents": {
      "liveChatContinuation": {
        "continuations": [
          {
            "invalidationContinuationData": {
              "invalidationId": {
                "objectSource": 1000,
                "objectId": "___objectId___",
                "topic": "chat~00000000000~0000000",
                "subscribeToGcmTopics": true,
                "protoCreationTimestampMs": "1577804400000"
              },
              "timeoutMs": 10000,
              "continuation": "___continuation___"
            }
          }
        ],
        "actions": [
          {
            "addChatItemAction": {
              "item": {
                "liveChatMembershipItemRenderer": {
                  "id": "dummy_id",
                  "timestampUsec": 1570678496000000,
                  "authorExternalChannelId": "author_channel_id",
                  "headerSubtext": {
                    "runs": [
                      {
                        "text": "新規メンバー"
                      }
                    ]
                  },
                  "authorName": {
                    "simpleText": "author_name"
                  },
                  "authorPhoto": {
                    "thumbnails": [
                      {
                        "url": "https://yt3.ggpht.com/------------/AAAAAAAAAAA/AAAAAAAAAAA/xxxxxxxxxxxx/s32-x-x-xx-xx-xx-c0xffffff/photo.jpg",
                        "width": 32,
                        "height": 32
                      },
                      {
                        "url": "https://yt3.ggpht.com/------------/AAAAAAAAAAA/AAAAAAAAAAA/xxxxxxxxxxxx/s64-x-x-xx-xx-xx-c0xffffff/photo.jpg",
                        "width": 64,
                        "height": 64
                      }
                    ]
                  },
                  "authorBadges": [
                    {
                      "liveChatAuthorBadgeRenderer": {
                        "customThumbnail": {
                          "thumbnails": [
                            {
                              "url": "https://yt3.ggpht.com/X=s32-c-k"
                            },
                            {
                              "url": "https://yt3.ggpht.com/X=s64-c-k"
                            }
                          ]
                        },
                        "tooltip": "新規メンバー",
                        "accessibility": {
                          "accessibilityData": {
                            "label": "新規メンバー"
                          }
                        }
                      }
                    }
                  ],
                  "contextMenuEndpoint": {
                    "commandMetadata": {
                      "webCommandMetadata": {
                        "ignoreNavigation": true
                      }
                    },
                    "liveChatItemContextMenuEndpoint": {
                      "params": "___params___"
                    }
                  },
                  "contextMenuAccessibility": {
                    "accessibilityData": {
                      "label": "コメントの操作"
                    }
                  }
                }
              }
            }
          }
        ]
      }
    }
  }
}
82 tests/testdata/default/newSponsor_lagacy.json vendored Normal file
@@ -0,0 +1,82 @@
{
  "response": {
    "responseContext": {
      "webResponseContextExtensionData": ""
    },
    "continuationContents": {
      "liveChatContinuation": {
        "continuations": [
          {
            "invalidationContinuationData": {
              "invalidationId": {
                "objectSource": 1000,
                "objectId": "___objectId___",
                "topic": "chat~00000000000~0000000",
                "subscribeToGcmTopics": true,
                "protoCreationTimestampMs": "1577804400000"
              },
              "timeoutMs": 10000,
              "continuation": "___continuation___"
            }
          }
        ],
        "actions": [
          {
            "addChatItemAction": {
              "item": {
                "liveChatLegacyPaidMessageRenderer": {
                  "id": "dummy_id",
                  "timestampUsec": 1570678496000000,
                  "eventText": {
                    "runs": [
                      {
                        "text": "新規メンバー"
                      }
                    ]
                  },
                  "detailText": {
                    "simpleText": "ようこそ、author_name!"
                  },
                  "authorName": {
                    "simpleText": "author_name"
                  },
                  "authorPhoto": {
                    "thumbnails": [
                      {
                        "url": "https://yt3.ggpht.com/------------/AAAAAAAAAAA/AAAAAAAAAAA/xxxxxxxxxxxx/s32-x-x-xx-xx-xx-c0xffffff/photo.jpg",
                        "width": 32,
                        "height": 32
                      },
                      {
                        "url": "https://yt3.ggpht.com/------------/AAAAAAAAAAA/AAAAAAAAAAA/xxxxxxxxxxxx/s64-x-x-xx-xx-xx-c0xffffff/photo.jpg",
                        "width": 64,
                        "height": 64
                      }
                    ]
                  },
                  "authorExternalChannelId": "author_channel_id",
                  "contextMenuEndpoint": {
                    "clickTrackingParams": "___clickTrackingParams___",
                    "commandMetadata": {
                      "webCommandMetadata": {
                        "ignoreNavigation": true
                      }
                    },
                    "liveChatItemContextMenuEndpoint": {
                      "params": "___params___"
                    }
                  },
                  "contextMenuAccessibility": {
                    "accessibilityData": {
                      "label": "コメントの操作"
                    }
                  }
                }
              }
            }
          }
        ]
      }
    }
  }
}
112 tests/testdata/default/replay_member_text.json vendored Normal file
@@ -0,0 +1,112 @@
{
  "response": {
    "responseContext": {
      "webResponseContextExtensionData": "data"
    },
    "continuationContents": {
      "liveChatContinuation": {
        "continuations": [
          {
            "liveChatReplayContinuationData": {
              "invalidationId": {
                "objectSource": 1000,
                "objectId": "___objectId___",
                "topic": "chat~00000000000~0000000",
                "subscribeToGcmTopics": true,
                "protoCreationTimestampMs": "1577804400000"
              },
              "timeoutMs": 10000,
              "continuation": "___continuation___"
            }
          }
        ],
        "actions": [
          {
            "replayChatItemAction": {
              "actions": [
                {
                  "addChatItemAction": {
                    "item": {
                      "liveChatTextMessageRenderer": {
                        "message": {
                          "runs": [
                            {
                              "text": "dummy_message"
                            }
                          ]
                        },
                        "authorName": {
                          "simpleText": "author_name"
                        },
                        "authorPhoto": {
                          "thumbnails": [
                            {
                              "url": "https://yt3.ggpht.com/------------/AAAAAAAAAAA/AAAAAAAAAAA/xxxxxxxxxxxx/s32-x-x-xx-xx-xx-c0xffffff/photo.jpg",
                              "width": 32,
                              "height": 32
                            },
                            {
                              "url": "https://yt3.ggpht.com/------------/AAAAAAAAAAA/AAAAAAAAAAA/xxxxxxxxxxxx/s64-x-x-xx-xx-xx-c0xffffff/photo.jpg",
                              "width": 64,
                              "height": 64
                            }
                          ]
                        },
                        "contextMenuEndpoint": {
                          "clickTrackingParams": "___clickTrackingParams___",
                          "commandMetadata": {
                            "webCommandMetadata": {
                              "ignoreNavigation": true
                            }
                          },
                          "liveChatItemContextMenuEndpoint": {
                            "params": "___params___"
                          }
                        },
                        "id": "dummy_id",
                        "timestampUsec": 1570678496000000,
                        "authorBadges": [
                          {
                            "liveChatAuthorBadgeRenderer": {
                              "customThumbnail": {
                                "thumbnails": [
                                  {
                                    "url": "https://yt3.ggpht.com/X=s16-c-k"
                                  },
                                  {
                                    "url": "https://yt3.ggpht.com/X=s32-c-k"
                                  }
                                ]
                              },
                              "tooltip": "メンバー(1 か月)",
                              "accessibility": {
                                "accessibilityData": {
                                  "label": "メンバー(1 か月)"
                                }
                              }
                            }
                          }
                        ],
                        "authorExternalChannelId": "author_channel_id",
                        "contextMenuAccessibility": {
                          "accessibilityData": {
                            "label": "コメントの操作"
                          }
                        },
                        "timestampText": {
                          "simpleText": "1:23:45"
                        }
                      }
                    },
                    "clientId": "dummy_client_id"
                  }
                }
              ],
              "videoOffsetTimeMsec": "5025120"
            }
          }
        ]
      }
    }
  }
}
184 tests/testdata/default/superchat.json vendored Normal file
@@ -0,0 +1,184 @@
{
  "response": {
    "responseContext": {
      "webResponseContextExtensionData": ""
    },
    "continuationContents": {
      "liveChatContinuation": {
        "continuations": [
          {
            "invalidationContinuationData": {
              "invalidationId": {
                "objectSource": 1000,
                "objectId": "___objectId___",
                "topic": "chat~00000000000~0000000",
                "subscribeToGcmTopics": true,
                "protoCreationTimestampMs": "1577804400000"
              },
              "timeoutMs": 10000,
              "continuation": "___continuation___"
            }
          }
        ],
        "actions": [
          {
            "addChatItemAction": {
              "item": {
                "liveChatPaidMessageRenderer": {
                  "id": "dummy_id",
                  "timestampUsec": 1570678496000000,
                  "authorName": {
                    "simpleText": "author_name"
                  },
                  "authorPhoto": {
                    "thumbnails": [
                      {
                        "url": "https://yt3.ggpht.com/------------/AAAAAAAAAAA/AAAAAAAAAAA/xxxxxxxxxxxx/s32-x-x-xx-xx-xx-c0xffffff/photo.jpg",
                        "width": 32,
                        "height": 32
                      },
                      {
                        "url": "https://yt3.ggpht.com/------------/AAAAAAAAAAA/AAAAAAAAAAA/xxxxxxxxxxxx/s64-x-x-xx-xx-xx-c0xffffff/photo.jpg",
                        "width": 64,
                        "height": 64
                      }
                    ]
                  },
                  "purchaseAmountText": {
                    "simpleText": "¥800"
                  },
                  "message": {
                    "runs": [
                      {
                        "text": "dummy_message"
                      }
                    ]
                  },
                  "headerBackgroundColor": 4278239141,
                  "headerTextColor": 4278190080,
                  "bodyBackgroundColor": 4280150454,
                  "bodyTextColor": 4278190080,
                  "authorExternalChannelId": "author_channel_id",
                  "authorNameTextColor": 2315255808,
                  "contextMenuEndpoint": {
                    "commandMetadata": {
                      "webCommandMetadata": {
                        "ignoreNavigation": true
                      }
                    },
                    "liveChatItemContextMenuEndpoint": {
                      "params": "___params___"
                    }
                  },
                  "timestampColor": 2147483648,
                  "contextMenuAccessibility": {
                    "accessibilityData": {
                      "label": "コメントの操作"
                    }
                  }
                }
              }
            }
          },
          {
            "addLiveChatTickerItemAction": {
              "item": {
                "liveChatTickerPaidMessageItemRenderer": {
                  "id": "dummy_id",
                  "amount": {
                    "simpleText": "¥846"
                  },
                  "amountTextColor": 4278190080,
                  "startBackgroundColor": 4280150454,
                  "endBackgroundColor": 4278239141,
                  "authorPhoto": {
                    "thumbnails": [
                      {
                        "url": "https://yt3.ggpht.com/------------/AAAAAAAAAAA/AAAAAAAAAAA/xxxxxxxxxxxx/s32-x-x-xx-xx-xx-c0xffffff/photo.jpg",
                        "width": 32,
                        "height": 32
                      },
                      {
                        "url": "https://yt3.ggpht.com/------------/AAAAAAAAAAA/AAAAAAAAAAA/xxxxxxxxxxxx/s32-x-x-xx-xx-xx-c0xffffff/photo.jpg",
                        "width": 64,
                        "height": 64
                      }
                    ]
                  },
                  "durationSec": 120,
                  "showItemEndpoint": {
                    "commandMetadata": {
                      "webCommandMetadata": {
                        "ignoreNavigation": true
                      }
                    },
                    "showLiveChatItemEndpoint": {
                      "renderer": {
                        "liveChatPaidMessageRenderer": {
                          "id": "dummy_id",
                          "timestampUsec": 1570678496000000,
                          "authorName": {
                            "simpleText": "author_name"
                          },
                          "authorPhoto": {
                            "thumbnails": [
                              {
                                "url": "https://yt3.ggpht.com/------------/AAAAAAAAAAA/AAAAAAAAAAA/xxxxxxxxxxxx/s32-x-x-xx-xx-xx-c0xffffff/photo.jpg",
                                "width": 32,
                                "height": 32
                              },
                              {
                                "url": "https://yt3.ggpht.com/------------/AAAAAAAAAAA/AAAAAAAAAAA/xxxxxxxxxxxx/s64-x-x-xx-xx-xx-c0xffffff/photo.jpg",
                                "width": 64,
                                "height": 64
                              }
                            ]
                          },
                          "purchaseAmountText": {
                            "simpleText": "¥846"
                          },
                          "message": {
                            "runs": [
                              {
                                "text": "dummy_message"
                              }
                            ]
                          },
                          "headerBackgroundColor": 4278239141,
                          "headerTextColor": 4278190080,
                          "bodyBackgroundColor": 4280150454,
                          "bodyTextColor": 4278190080,
                          "authorExternalChannelId": "author_channel_id",
                          "authorNameTextColor": 2315255808,
                          "contextMenuEndpoint": {
                            "commandMetadata": {
                              "webCommandMetadata": {
                                "ignoreNavigation": true
                              }
                            },
                            "liveChatItemContextMenuEndpoint": {
                              "params": "___params___"
                            }
                          },
                          "timestampColor": 2147483648,
                          "contextMenuAccessibility": {
                            "accessibilityData": {
                              "label": "コメントの操作"
                            }
                          }
                        }
                      }
                    }
                  },
                  "authorExternalChannelId": "http://www.youtube.com/channel/author_channel_url",
                  "fullDurationSec": 120
                }
              },
              "durationSec": "120"
            }
          }
        ]
      }
    }
  }
}
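The color fields in this fixture (headerBackgroundColor, bodyBackgroundColor, timestampColor and so on) are 32-bit ARGB values written as decimal integers; 4278190080, for instance, is 0xFF000000, opaque black. An illustrative snippet (not part of the test data) for inspecting them in hex:

    # Convert the decimal ARGB color values from the fixture into hex notation.
    for name, value in {
        "headerBackgroundColor": 4278239141,
        "bodyBackgroundColor": 4280150454,
        "bodyTextColor": 4278190080,
    }.items():
        print(f"{name}: #{value:08X}")  # e.g. bodyTextColor: #FF000000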
99 tests/testdata/default/supersticker.json vendored Normal file
@@ -0,0 +1,99 @@
{
  "response": {
    "responseContext": {
      "webResponseContextExtensionData": ""
    },
    "continuationContents": {
      "liveChatContinuation": {
        "continuations": [
          {
            "invalidationContinuationData": {
              "invalidationId": {
                "objectSource": 1000,
                "objectId": "___objectId___",
                "topic": "chat~00000000000~0000000",
                "subscribeToGcmTopics": true,
                "protoCreationTimestampMs": "1577804400000"
              },
              "timeoutMs": 10000,
              "continuation": "___continuation___"
            }
          }
        ],
        "actions": [
          {
            "addChatItemAction": {
              "item": {
                "liveChatPaidStickerRenderer": {
                  "id": "dummy_id",
                  "contextMenuEndpoint": {
                    "commandMetadata": {
                      "webCommandMetadata": {
                        "ignoreNavigation": true
                      }
                    },
                    "liveChatItemContextMenuEndpoint": {
                      "params": "___params___"
                    }
                  },
                  "contextMenuAccessibility": {
                    "accessibilityData": {
                      "label": "コメントの操作"
                    }
                  },
                  "timestampUsec": 1570678496000000,
                  "authorPhoto": {
                    "thumbnails": [
                      {
                        "url": "https://yt3.ggpht.com/------------/AAAAAAAAAAA/AAAAAAAAAAA/xxxxxxxxxxxx/s32-x-x-xx-xx-xx-c0xffffff/photo.jpg",
                        "width": 32,
                        "height": 32
                      },
                      {
                        "url": "https://yt3.ggpht.com/------------/AAAAAAAAAAA/AAAAAAAAAAA/xxxxxxxxxxxx/s64-x-x-xx-xx-xx-c0xffffff/photo.jpg",
                        "width": 64,
                        "height": 64
                      }
                    ]
                  },
                  "authorName": {
                    "simpleText": "author_name"
                  },
                  "authorExternalChannelId": "author_channel_id",
                  "sticker": {
                    "thumbnails": [
                      {
                        "url": "//lh3.googleusercontent.com/param_s=s72-rp",
                        "width": 72,
                        "height": 72
                      },
                      {
                        "url": "//lh3.googleusercontent.com/param_s=s144-rp",
                        "width": 144,
                        "height": 144
                      }
                    ],
                    "accessibility": {
                      "accessibilityData": {
                        "label": "___sticker_label___"
                      }
                    }
                  },
                  "moneyChipBackgroundColor": 4278248959,
                  "moneyChipTextColor": 4278190080,
                  "purchaseAmountText": {
                    "simpleText": "¥200"
                  },
                  "stickerDisplayWidth": 72,
                  "stickerDisplayHeight": 72,
                  "backgroundColor": 4278237396,
                  "authorNameTextColor": 3003121664
                }
              }
            }
          }
        ]
      }
    }
  }
}
79 tests/testdata/default/textmessage.json vendored Normal file
@@ -0,0 +1,79 @@
{
  "response": {
    "responseContext": {
      "webResponseContextExtensionData": ""
    },
    "continuationContents": {
      "liveChatContinuation": {
        "continuations": [
          {
            "invalidationContinuationData": {
              "invalidationId": {
                "objectSource": 1000,
                "objectId": "___objectId___",
                "topic": "chat~00000000000~0000000",
                "subscribeToGcmTopics": true,
                "protoCreationTimestampMs": "1577804400000"
              },
              "timeoutMs": 10000,
              "continuation": "___continuation___"
            }
          }
        ],
        "actions": [
          {
            "addChatItemAction": {
              "item": {
                "liveChatTextMessageRenderer": {
                  "message": {
                    "runs": [
                      {
                        "text": "dummy_message"
                      }
                    ]
                  },
                  "authorName": {
                    "simpleText": "author_name"
                  },
                  "authorPhoto": {
                    "thumbnails": [
                      {
                        "url": "https://yt3.ggpht.com/------------/AAAAAAAAAAA/AAAAAAAAAAA/xxxxxxxxxxxx/s32-x-x-xx-xx-xx-c0xffffff/photo.jpg",
                        "width": 32,
                        "height": 32
                      },
                      {
                        "url": "https://yt3.ggpht.com/------------/AAAAAAAAAAA/AAAAAAAAAAA/xxxxxxxxxxxx/s64-x-x-xx-xx-xx-c0xffffff/photo.jpg",
                        "width": 64,
                        "height": 64
                      }
                    ]
                  },
                  "contextMenuEndpoint": {
                    "commandMetadata": {
                      "webCommandMetadata": {
                        "ignoreNavigation": true
                      }
                    },
                    "liveChatItemContextMenuEndpoint": {
                      "params": "___params___"
                    }
                  },
                  "id": "dummy_id",
                  "timestampUsec": 1570678496000000,
                  "authorExternalChannelId": "author_channel_id",
                  "contextMenuAccessibility": {
                    "accessibilityData": {
                      "label": "コメントの操作"
                    }
                  }
                }
              },
              "clientId": "dummy_client_id"
            }
          }
        ]
      }
    }
  }
}
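All of the fixtures above share the same envelope: response, then continuationContents, then liveChatContinuation, which holds a continuations list and an actions list. A hypothetical helper (not part of the repository) showing how a test could load one of the non-replay fixtures and reach the renderer item; note that the replay fixture nests its addChatItemAction one level deeper inside replayChatItemAction:

    import json

    def first_renderer_item(path):
        # Walk the common envelope and return the first action's renderer item,
        # e.g. {"liveChatTextMessageRenderer": {...}} for textmessage.json.
        with open(path, encoding='utf-8') as f:
            data = json.load(f)
        chat = data["response"]["continuationContents"]["liveChatContinuation"]
        return chat["actions"][0]["addChatItemAction"]["item"]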
15 tests/testdata/videoinfo/collapsed_page.txt vendored Normal file
File diff suppressed because one or more lines are too long

15 tests/testdata/videoinfo/extradata_page.txt vendored Normal file
File diff suppressed because one or more lines are too long

15 tests/testdata/videoinfo/pattern_unmatch.txt vendored Normal file
File diff suppressed because one or more lines are too long