Compare commits
355 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
e865c25a4e | ||
|
|
6d581c22f9 | ||
|
|
87aadeeb58 | ||
|
|
d331131cfe | ||
|
|
0dce1b009a | ||
|
|
bd77cd2058 | ||
|
|
8e47e8d5db | ||
|
|
946dd155d8 | ||
|
|
3565153597 | ||
|
|
f6c6ec5603 | ||
|
|
1cd0bab60b | ||
|
|
1c246eea6d | ||
|
|
3a365243d9 | ||
|
|
7f8882c9e9 | ||
|
|
bc401ac80f | ||
|
|
0a9837adca | ||
|
|
f4bf30c0e9 | ||
|
|
acfba74821 | ||
|
|
f46845c777 | ||
|
|
74f1536553 | ||
|
|
af4c2fe4b9 | ||
|
|
b7c656536d | ||
|
|
faf875c0f5 | ||
|
|
b3ebe3879d | ||
|
|
da79895e55 | ||
|
|
aaa7421fdf | ||
|
|
b9f213f047 | ||
|
|
fee070b299 | ||
|
|
275e28b635 | ||
|
|
808e599be6 | ||
|
|
5cb6f7f123 | ||
|
|
a2f1c658f0 | ||
|
|
05de644d77 | ||
|
|
b908855566 | ||
|
|
8d93bfcb95 | ||
|
|
bf68859f38 | ||
|
|
78fbe97b66 | ||
|
|
166a256c1c | ||
|
|
b7f2967a4f | ||
|
|
0a8ff3abdc | ||
|
|
9b38a5428d | ||
|
|
9311bf1993 | ||
|
|
ee839da7c9 | ||
|
|
2ae77b3850 | ||
|
|
afd7cea635 | ||
|
|
9018ff9ee4 | ||
|
|
b676912d64 | ||
|
|
d75e1db304 | ||
|
|
0d7dd26ede | ||
|
|
24b9f45e4a | ||
|
|
821cc57024 | ||
|
|
f050f5d650 | ||
|
|
c4eb78e112 | ||
|
|
40b0138a92 | ||
|
|
1d80dce913 | ||
|
|
8b18f4476a | ||
|
|
35a27fee3e | ||
|
|
eca766ff1a | ||
|
|
139dc7fce2 | ||
|
|
2c8a883ee3 | ||
|
|
865e4b5fab | ||
|
|
02d48ceccc | ||
|
|
bc3f16e86b | ||
|
|
1e6ce58f8b | ||
|
|
4db9486853 | ||
|
|
775b79642e | ||
|
|
423a128882 | ||
|
|
aaf9860bdc | ||
|
|
83ad4dcf1f | ||
|
|
765251b872 | ||
|
|
7ea88fead2 | ||
|
|
ea67e3e54e | ||
|
|
a5c7ba52c8 | ||
|
|
7cf780ee87 | ||
|
|
c37201fa03 | ||
|
|
6fcc1393de | ||
|
|
a474899268 | ||
|
|
3f72eb0e00 | ||
|
|
661d1e4b81 | ||
|
|
4652a56bc6 | ||
|
|
966320cab5 | ||
|
|
35218a66da | ||
|
|
3432609588 | ||
|
|
3ad6b7e845 | ||
|
|
48669e5f53 | ||
|
|
7b0708ec46 | ||
|
|
f46df3ae42 | ||
|
|
96c028bd5d | ||
|
|
402dc15d7a | ||
|
|
6088ab6932 | ||
|
|
13812bdad3 | ||
|
|
d98d34d8b3 | ||
|
|
24fa104e84 | ||
|
|
b4dad8c641 | ||
|
|
3550cd6d91 | ||
|
|
2815b48e0e | ||
|
|
650e6ccb65 | ||
|
|
4a00a19a43 | ||
|
|
b067eda7b6 | ||
|
|
1b6bc86e76 | ||
|
|
da2b513bcc | ||
|
|
6adae578ef | ||
|
|
128a834841 | ||
|
|
086a14115f | ||
|
|
6a392f3e1a | ||
|
|
93127a703c | ||
|
|
e4ddbaf8ae | ||
|
|
ec75058605 | ||
|
|
2b62e5dc5e | ||
|
|
8d7874096e | ||
|
|
99fcab83c8 | ||
|
|
3027bc0579 | ||
|
|
b1b70a4e76 | ||
|
|
de41341d84 | ||
|
|
a03d43b081 | ||
|
|
f60aaade7f | ||
|
|
d3c34086ff | ||
|
|
6b58c9bcf5 | ||
|
|
c2cba1651e | ||
|
|
ada3eb437d | ||
|
|
c1517d5be8 | ||
|
|
351034d1e6 | ||
|
|
c1db5a0c47 | ||
|
|
088dce712a | ||
|
|
425e880b09 | ||
|
|
62ec78abee | ||
|
|
c84a32682c | ||
|
|
74277b2afe | ||
|
|
cd20b74b2a | ||
|
|
06f54fd985 | ||
|
|
98b0470703 | ||
|
|
bb4113b53c | ||
|
|
07f4382ed4 | ||
|
|
d40720616b | ||
|
|
eebe7c79bd | ||
|
|
6c9e327e36 | ||
|
|
e9161c0ddd | ||
|
|
c8b75dcf0e | ||
|
|
30cb7d7043 | ||
|
|
19d5b74beb | ||
|
|
d5c3e45edc | ||
|
|
1d479fc15c | ||
|
|
20a20ddd08 | ||
|
|
00c239f974 | ||
|
|
67b766b32c | ||
|
|
249aa0d147 | ||
|
|
c708a588d8 | ||
|
|
cb15df525f | ||
|
|
fcddc1516b | ||
|
|
a7732efd07 | ||
|
|
0a2f4e8418 | ||
|
|
0c0ba0dfe6 | ||
|
|
02827b174e | ||
|
|
81dee8a218 | ||
|
|
5eb8bdbd0e | ||
|
|
a37602e666 | ||
|
|
306b69198e | ||
|
|
175e457052 | ||
|
|
5633a48618 | ||
|
|
d7e608e8a1 | ||
|
|
213427fab3 | ||
|
|
3427c6fb69 | ||
|
|
603c4470b7 | ||
|
|
37c8b7ae45 | ||
|
|
d362152c77 | ||
|
|
8f5c3f312a | ||
|
|
15a1d5c210 | ||
|
|
499cf26fa8 | ||
|
|
90596be880 | ||
|
|
50d7b097e6 | ||
|
|
b8d5ec5465 | ||
|
|
3200c5654f | ||
|
|
4905b1e4d8 | ||
|
|
16df63c14e | ||
|
|
e950dff9d2 | ||
|
|
39d99ad4af | ||
|
|
3675c91240 | ||
|
|
46258f625a | ||
|
|
2cc161b589 | ||
|
|
115277e5e1 | ||
|
|
ebf0e7c181 | ||
|
|
b418898eef | ||
|
|
3106b3e545 | ||
|
|
50816a661d | ||
|
|
6755bc8bb2 | ||
|
|
d62e7730ab | ||
|
|
26be989b9b | ||
|
|
73ad0a1f44 | ||
|
|
66b185ebf7 | ||
|
|
8bd82713e2 | ||
|
|
71650c39f7 | ||
|
|
488445c73b | ||
|
|
075e811efe | ||
|
|
9f9b83f185 | ||
|
|
58d9bf7fdb | ||
|
|
b3e6275de7 | ||
|
|
748778f545 | ||
|
|
b2a68d0a74 | ||
|
|
e29b3b8377 | ||
|
|
0859ed5fb1 | ||
|
|
a80d5ba080 | ||
|
|
ac2924824e | ||
|
|
b7e6043a71 | ||
|
|
820ba35013 | ||
|
|
ecd2d130bf | ||
|
|
1d410b6e68 | ||
|
|
f77a2c889b | ||
|
|
47d5ab288f | ||
|
|
5f53fd24dd | ||
|
|
11a9d0e2d7 | ||
|
|
6f18de46f7 | ||
|
|
480c9e15b8 | ||
|
|
35aa7636f6 | ||
|
|
8fee67c2d4 | ||
|
|
74bfdd07e2 | ||
|
|
d3f1643a40 | ||
|
|
eb29f27493 | ||
|
|
8adf75ab83 | ||
|
|
2e05803d75 | ||
|
|
f16c0ee73a | ||
|
|
a338f2b782 | ||
|
|
864ccddfd7 | ||
|
|
339df69e36 | ||
|
|
76a5b0cd18 | ||
|
|
be0ab2431b | ||
|
|
2edb60c592 | ||
|
|
2c6c3a1ca3 | ||
|
|
4be540793d | ||
|
|
08b86fe596 | ||
|
|
157f3b9952 | ||
|
|
8f3ca2662a | ||
|
|
c4b015861c | ||
|
|
3aa413d59e | ||
|
|
03ba285a16 | ||
|
|
5fe0ee5aa8 | ||
|
|
4e829a25d4 | ||
|
|
15132a9bb8 | ||
|
|
64ace9dad6 | ||
|
|
9a2e96d3a0 | ||
|
|
a3695a59b8 | ||
|
|
bc8655ed62 | ||
|
|
3bdc465740 | ||
|
|
235d6b7212 | ||
|
|
9f0754da57 | ||
|
|
306b0a4564 | ||
|
|
1c49387f1a | ||
|
|
300d96e56c | ||
|
|
0e301f48a8 | ||
|
|
a790ab13a9 | ||
|
|
0456300d19 | ||
|
|
2ef1e7028f | ||
|
|
9413c4a186 | ||
|
|
8a8cef399f | ||
|
|
3bcad12cf6 | ||
|
|
4eb18279fe | ||
|
|
e9ed564e1b | ||
|
|
95f975c93d | ||
|
|
8012e1d191 | ||
|
|
f9480ea1eb | ||
|
|
404727c49c | ||
|
|
6b924a88ef | ||
|
|
56294d6a67 | ||
|
|
283443e374 | ||
|
|
89b51c420f | ||
|
|
96474f10c6 | ||
|
|
5f78a99507 | ||
|
|
78373bf45c | ||
|
|
3e11deed8f | ||
|
|
6daa375adf | ||
|
|
497d84015e | ||
|
|
a90bda674d | ||
|
|
48543b7866 | ||
|
|
5d3c7b5abd | ||
|
|
8df7062873 | ||
|
|
b788f692ad | ||
|
|
713215f1d7 | ||
|
|
f16ef60f11 | ||
|
|
9bbdb6c4de | ||
|
|
2200abf204 | ||
|
|
3ed0cb2c35 | ||
|
|
5fa4d051ee | ||
|
|
cd6d522055 | ||
|
|
aa8a4fb592 | ||
|
|
92a01aa4d9 | ||
|
|
dbde072828 | ||
|
|
e3f9f95fb1 | ||
|
|
fa02116ab4 | ||
|
|
d8656161cd | ||
|
|
174d9f27c0 | ||
|
|
0abf8dd9f0 | ||
|
|
5ab653a1b2 | ||
|
|
6e6bb8e019 | ||
|
|
ee4b696fc5 | ||
|
|
fd1d283caa | ||
|
|
85966186b5 | ||
|
|
71341d2876 | ||
|
|
8882c82f8b | ||
|
|
cf6ed24864 | ||
|
|
584b9c5591 | ||
|
|
167c8acb93 | ||
|
|
75a31bd245 | ||
|
|
366d75c2bb | ||
|
|
b7ff2b6537 | ||
|
|
5dfd883fc9 | ||
|
|
133a8afb27 | ||
|
|
971e4bdf39 | ||
|
|
f78bfde59e | ||
|
|
a7379fd93f | ||
|
|
1cc3661d35 | ||
|
|
6c781483a9 | ||
|
|
5c3280f858 | ||
|
|
7500f79de0 | ||
|
|
94d4eebd0f | ||
|
|
2474207691 | ||
|
|
e6dbc8772e | ||
|
|
8f91e031f3 | ||
|
|
870d1f3fbe | ||
|
|
141dbcd2da | ||
|
|
6eb848f1c9 | ||
|
|
8d7fc03fe0 | ||
|
|
970e63cb38 | ||
|
|
1c817b6476 | ||
|
|
51eff10eeb | ||
|
|
18b88200a8 | ||
|
|
c95d70a232 | ||
|
|
7640586591 | ||
|
|
f7ec14e166 | ||
|
|
a4dacdb7d7 | ||
|
|
785a82b618 | ||
|
|
faf886eebd | ||
|
|
8a627414cb | ||
|
|
d14262cbcb | ||
|
|
da7c694dfb | ||
|
|
9aa35b9756 | ||
|
|
f0a1a509a0 | ||
|
|
5ebca605ac | ||
|
|
3826b32ab9 | ||
|
|
a46c82d3c0 | ||
|
|
206d052907 | ||
|
|
141d7a9299 | ||
|
|
04457eaa5c | ||
|
|
bd32c75833 | ||
|
|
84bae4ad2a | ||
|
|
d72608bf0a | ||
|
|
3243d69d7a | ||
|
|
6e1b735ebc | ||
|
|
c54481dad5 | ||
|
|
78604c84d4 | ||
|
|
21d93613a2 | ||
|
|
56bf721330 | ||
|
|
5f50598f79 | ||
|
|
5e8c438c6b | ||
|
|
23e47f6fb0 | ||
|
|
74dfe0a612 | ||
|
|
725af25d81 |
27
.github/workflows/run_test.yml
vendored
Normal file
27
.github/workflows/run_test.yml
vendored
Normal file
@@ -0,0 +1,27 @@
|
||||
name: Run All UnitTest
|
||||
|
||||
on: [push, pull_request]
|
||||
|
||||
jobs:
|
||||
build:
|
||||
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
max-parallel: 4
|
||||
matrix:
|
||||
python-version: [3.7, 3.8, 3.9]
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- name: Set up Python ${{ matrix.python-version }}
|
||||
uses: actions/setup-python@v2
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
python -m pip install --upgrade pip
|
||||
pip install -r requirements.txt -r requirements_test.txt
|
||||
- name: Test with pytest
|
||||
run: |
|
||||
export PYTHONPATH=./
|
||||
pytest --verbose --color=yes
|
||||
39
.gitignore
vendored
Normal file
39
.gitignore
vendored
Normal file
@@ -0,0 +1,39 @@
|
||||
# Byte-compiled / optimized / DLL files
|
||||
__pycache__/
|
||||
.pytest_cache/
|
||||
|
||||
#logs
|
||||
log.txt
|
||||
*.log
|
||||
|
||||
# Environments
|
||||
.env
|
||||
.venv
|
||||
env/
|
||||
venv/
|
||||
ENV/
|
||||
env.bak/
|
||||
venv.bak/
|
||||
|
||||
# Unit test / coverage reports
|
||||
htmlcov/
|
||||
.tox/
|
||||
.coverage
|
||||
.coverage.*
|
||||
.cache
|
||||
nosetests.xml
|
||||
coverage.xml
|
||||
|
||||
# VSCode
|
||||
.vscode
|
||||
|
||||
# develop
|
||||
*.egg-info/
|
||||
|
||||
# Pypi dist
|
||||
dist/
|
||||
README.rst
|
||||
temporary/
|
||||
# Pypi wheel
|
||||
build/
|
||||
exclude/
|
||||
21
LICENSE
Normal file
21
LICENSE
Normal file
@@ -0,0 +1,21 @@
|
||||
MIT License
|
||||
|
||||
Copyright (c) 2020 taizan-hokuto
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
13
Pipfile
Normal file
13
Pipfile
Normal file
@@ -0,0 +1,13 @@
|
||||
[[source]]
|
||||
url = "https://pypi.org/simple"
|
||||
verify_ssl = true
|
||||
name = "pypi"
|
||||
|
||||
[packages]
|
||||
httpx = {extras = ["http2"], version = "0.16.1"}
|
||||
|
||||
[dev-packages]
|
||||
pytest-mock = "*"
|
||||
pytest-httpx = "*"
|
||||
wheel = "*"
|
||||
twine = "*"
|
||||
448
Pipfile.lock
generated
Normal file
448
Pipfile.lock
generated
Normal file
@@ -0,0 +1,448 @@
|
||||
{
|
||||
"_meta": {
|
||||
"hash": {
|
||||
"sha256": "e1eb34f14c75998519a90838b283ccd23bd168afa8e4837f956c5c4df66376f9"
|
||||
},
|
||||
"pipfile-spec": 6,
|
||||
"requires": {},
|
||||
"sources": [
|
||||
{
|
||||
"name": "pypi",
|
||||
"url": "https://pypi.org/simple",
|
||||
"verify_ssl": true
|
||||
}
|
||||
]
|
||||
},
|
||||
"default": {
|
||||
"certifi": {
|
||||
"hashes": [
|
||||
"sha256:2bbf76fd432960138b3ef6dda3dde0544f27cbf8546c458e60baf371917ba9ee",
|
||||
"sha256:50b1e4f8446b06f41be7dd6338db18e0990601dce795c2b1686458aa7e8fa7d8"
|
||||
],
|
||||
"version": "==2021.5.30"
|
||||
},
|
||||
"h11": {
|
||||
"hashes": [
|
||||
"sha256:36a3cb8c0a032f56e2da7084577878a035d3b61d104230d4bd49c0c6b555a9c6",
|
||||
"sha256:47222cb6067e4a307d535814917cd98fd0a57b6788ce715755fa2b6c28b56042"
|
||||
],
|
||||
"version": "==0.12.0"
|
||||
},
|
||||
"h2": {
|
||||
"hashes": [
|
||||
"sha256:61e0f6601fa709f35cdb730863b4e5ec7ad449792add80d1410d4174ed139af5",
|
||||
"sha256:875f41ebd6f2c44781259005b157faed1a5031df3ae5aa7bcb4628a6c0782f14"
|
||||
],
|
||||
"version": "==3.2.0"
|
||||
},
|
||||
"hpack": {
|
||||
"hashes": [
|
||||
"sha256:0edd79eda27a53ba5be2dfabf3b15780928a0dff6eb0c60a3d6767720e970c89",
|
||||
"sha256:8eec9c1f4bfae3408a3f30500261f7e6a65912dc138526ea054f9ad98892e9d2"
|
||||
],
|
||||
"version": "==3.0.0"
|
||||
},
|
||||
"httpcore": {
|
||||
"hashes": [
|
||||
"sha256:37ae835fb370049b2030c3290e12ed298bf1473c41bb72ca4aa78681eba9b7c9",
|
||||
"sha256:93e822cd16c32016b414b789aeff4e855d0ccbfc51df563ee34d4dbadbb3bcdc"
|
||||
],
|
||||
"version": "==0.12.3"
|
||||
},
|
||||
"httpx": {
|
||||
"extras": [
|
||||
"http2"
|
||||
],
|
||||
"hashes": [
|
||||
"sha256:126424c279c842738805974687e0518a94c7ae8d140cd65b9c4f77ac46ffa537",
|
||||
"sha256:9cffb8ba31fac6536f2c8cde30df859013f59e4bcc5b8d43901cb3654a8e0a5b"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==0.16.1"
|
||||
},
|
||||
"hyperframe": {
|
||||
"hashes": [
|
||||
"sha256:5187962cb16dcc078f23cb5a4b110098d546c3f41ff2d4038a9896893bbd0b40",
|
||||
"sha256:a9f5c17f2cc3c719b917c4f33ed1c61bd1f8dfac4b1bd23b7c80b3400971b41f"
|
||||
],
|
||||
"version": "==5.2.0"
|
||||
},
|
||||
"idna": {
|
||||
"hashes": [
|
||||
"sha256:14475042e284991034cb48e06f6851428fb14c4dc953acd9be9a5e95c7b6dd7a",
|
||||
"sha256:467fbad99067910785144ce333826c71fb0e63a425657295239737f7ecd125f3"
|
||||
],
|
||||
"version": "==3.2"
|
||||
},
|
||||
"rfc3986": {
|
||||
"extras": [
|
||||
"idna2008"
|
||||
],
|
||||
"hashes": [
|
||||
"sha256:270aaf10d87d0d4e095063c65bf3ddbc6ee3d0b226328ce21e036f946e421835",
|
||||
"sha256:a86d6e1f5b1dc238b218b012df0aa79409667bb209e58da56d0b94704e712a97"
|
||||
],
|
||||
"version": "==1.5.0"
|
||||
},
|
||||
"sniffio": {
|
||||
"hashes": [
|
||||
"sha256:471b71698eac1c2112a40ce2752bb2f4a4814c22a54a3eed3676bc0f5ca9f663",
|
||||
"sha256:c4666eecec1d3f50960c6bdf61ab7bc350648da6c126e3cf6898d8cd4ddcd3de"
|
||||
],
|
||||
"version": "==1.2.0"
|
||||
}
|
||||
},
|
||||
"develop": {
|
||||
"attrs": {
|
||||
"hashes": [
|
||||
"sha256:149e90d6d8ac20db7a955ad60cf0e6881a3f20d37096140088356da6c716b0b1",
|
||||
"sha256:ef6aaac3ca6cd92904cdd0d83f629a15f18053ec84e6432106f7a4d04ae4f5fb"
|
||||
],
|
||||
"version": "==21.2.0"
|
||||
},
|
||||
"bleach": {
|
||||
"hashes": [
|
||||
"sha256:6123ddc1052673e52bab52cdc955bcb57a015264a1c57d37bea2f6b817af0125",
|
||||
"sha256:98b3170739e5e83dd9dc19633f074727ad848cbedb6026708c8ac2d3b697a433"
|
||||
],
|
||||
"version": "==3.3.0"
|
||||
},
|
||||
"certifi": {
|
||||
"hashes": [
|
||||
"sha256:2bbf76fd432960138b3ef6dda3dde0544f27cbf8546c458e60baf371917ba9ee",
|
||||
"sha256:50b1e4f8446b06f41be7dd6338db18e0990601dce795c2b1686458aa7e8fa7d8"
|
||||
],
|
||||
"version": "==2021.5.30"
|
||||
},
|
||||
"cffi": {
|
||||
"hashes": [
|
||||
"sha256:005a36f41773e148deac64b08f233873a4d0c18b053d37da83f6af4d9087b813",
|
||||
"sha256:04c468b622ed31d408fea2346bec5bbffba2cc44226302a0de1ade9f5ea3d373",
|
||||
"sha256:06d7cd1abac2ffd92e65c0609661866709b4b2d82dd15f611e602b9b188b0b69",
|
||||
"sha256:06db6321b7a68b2bd6df96d08a5adadc1fa0e8f419226e25b2a5fbf6ccc7350f",
|
||||
"sha256:0857f0ae312d855239a55c81ef453ee8fd24136eaba8e87a2eceba644c0d4c06",
|
||||
"sha256:0f861a89e0043afec2a51fd177a567005847973be86f709bbb044d7f42fc4e05",
|
||||
"sha256:1071534bbbf8cbb31b498d5d9db0f274f2f7a865adca4ae429e147ba40f73dea",
|
||||
"sha256:158d0d15119b4b7ff6b926536763dc0714313aa59e320ddf787502c70c4d4bee",
|
||||
"sha256:1bf1ac1984eaa7675ca8d5745a8cb87ef7abecb5592178406e55858d411eadc0",
|
||||
"sha256:1f436816fc868b098b0d63b8920de7d208c90a67212546d02f84fe78a9c26396",
|
||||
"sha256:24a570cd11895b60829e941f2613a4f79df1a27344cbbb82164ef2e0116f09c7",
|
||||
"sha256:24ec4ff2c5c0c8f9c6b87d5bb53555bf267e1e6f70e52e5a9740d32861d36b6f",
|
||||
"sha256:2894f2df484ff56d717bead0a5c2abb6b9d2bf26d6960c4604d5c48bbc30ee73",
|
||||
"sha256:29314480e958fd8aab22e4a58b355b629c59bf5f2ac2492b61e3dc06d8c7a315",
|
||||
"sha256:293e7ea41280cb28c6fcaaa0b1aa1f533b8ce060b9e701d78511e1e6c4a1de76",
|
||||
"sha256:34eff4b97f3d982fb93e2831e6750127d1355a923ebaeeb565407b3d2f8d41a1",
|
||||
"sha256:35f27e6eb43380fa080dccf676dece30bef72e4a67617ffda586641cd4508d49",
|
||||
"sha256:3c3f39fa737542161d8b0d680df2ec249334cd70a8f420f71c9304bd83c3cbed",
|
||||
"sha256:3d3dd4c9e559eb172ecf00a2a7517e97d1e96de2a5e610bd9b68cea3925b4892",
|
||||
"sha256:43e0b9d9e2c9e5d152946b9c5fe062c151614b262fda2e7b201204de0b99e482",
|
||||
"sha256:48e1c69bbacfc3d932221851b39d49e81567a4d4aac3b21258d9c24578280058",
|
||||
"sha256:51182f8927c5af975fece87b1b369f722c570fe169f9880764b1ee3bca8347b5",
|
||||
"sha256:58e3f59d583d413809d60779492342801d6e82fefb89c86a38e040c16883be53",
|
||||
"sha256:5de7970188bb46b7bf9858eb6890aad302577a5f6f75091fd7cdd3ef13ef3045",
|
||||
"sha256:65fa59693c62cf06e45ddbb822165394a288edce9e276647f0046e1ec26920f3",
|
||||
"sha256:681d07b0d1e3c462dd15585ef5e33cb021321588bebd910124ef4f4fb71aef55",
|
||||
"sha256:69e395c24fc60aad6bb4fa7e583698ea6cc684648e1ffb7fe85e3c1ca131a7d5",
|
||||
"sha256:6c97d7350133666fbb5cf4abdc1178c812cb205dc6f41d174a7b0f18fb93337e",
|
||||
"sha256:6e4714cc64f474e4d6e37cfff31a814b509a35cb17de4fb1999907575684479c",
|
||||
"sha256:72d8d3ef52c208ee1c7b2e341f7d71c6fd3157138abf1a95166e6165dd5d4369",
|
||||
"sha256:8ae6299f6c68de06f136f1f9e69458eae58f1dacf10af5c17353eae03aa0d827",
|
||||
"sha256:8b198cec6c72df5289c05b05b8b0969819783f9418e0409865dac47288d2a053",
|
||||
"sha256:99cd03ae7988a93dd00bcd9d0b75e1f6c426063d6f03d2f90b89e29b25b82dfa",
|
||||
"sha256:9cf8022fb8d07a97c178b02327b284521c7708d7c71a9c9c355c178ac4bbd3d4",
|
||||
"sha256:9de2e279153a443c656f2defd67769e6d1e4163952b3c622dcea5b08a6405322",
|
||||
"sha256:9e93e79c2551ff263400e1e4be085a1210e12073a31c2011dbbda14bda0c6132",
|
||||
"sha256:9ff227395193126d82e60319a673a037d5de84633f11279e336f9c0f189ecc62",
|
||||
"sha256:a465da611f6fa124963b91bf432d960a555563efe4ed1cc403ba5077b15370aa",
|
||||
"sha256:ad17025d226ee5beec591b52800c11680fca3df50b8b29fe51d882576e039ee0",
|
||||
"sha256:afb29c1ba2e5a3736f1c301d9d0abe3ec8b86957d04ddfa9d7a6a42b9367e396",
|
||||
"sha256:b85eb46a81787c50650f2392b9b4ef23e1f126313b9e0e9013b35c15e4288e2e",
|
||||
"sha256:bb89f306e5da99f4d922728ddcd6f7fcebb3241fc40edebcb7284d7514741991",
|
||||
"sha256:cbde590d4faaa07c72bf979734738f328d239913ba3e043b1e98fe9a39f8b2b6",
|
||||
"sha256:cc5a8e069b9ebfa22e26d0e6b97d6f9781302fe7f4f2b8776c3e1daea35f1adc",
|
||||
"sha256:cd2868886d547469123fadc46eac7ea5253ea7fcb139f12e1dfc2bbd406427d1",
|
||||
"sha256:d42b11d692e11b6634f7613ad8df5d6d5f8875f5d48939520d351007b3c13406",
|
||||
"sha256:df5052c5d867c1ea0b311fb7c3cd28b19df469c056f7fdcfe88c7473aa63e333",
|
||||
"sha256:f2d45f97ab6bb54753eab54fffe75aaf3de4ff2341c9daee1987ee1837636f1d",
|
||||
"sha256:fd78e5fee591709f32ef6edb9a015b4aa1a5022598e36227500c8f4e02328d9c"
|
||||
],
|
||||
"version": "==1.14.5"
|
||||
},
|
||||
"chardet": {
|
||||
"hashes": [
|
||||
"sha256:0d6f53a15db4120f2b08c94f11e7d93d2c911ee118b6b30a04ec3ee8310179fa",
|
||||
"sha256:f864054d66fd9118f2e67044ac8981a54775ec5b67aed0441892edb553d21da5"
|
||||
],
|
||||
"version": "==4.0.0"
|
||||
},
|
||||
"colorama": {
|
||||
"hashes": [
|
||||
"sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b",
|
||||
"sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"
|
||||
],
|
||||
"version": "==0.4.4"
|
||||
},
|
||||
"cryptography": {
|
||||
"hashes": [
|
||||
"sha256:0f1212a66329c80d68aeeb39b8a16d54ef57071bf22ff4e521657b27372e327d",
|
||||
"sha256:1e056c28420c072c5e3cb36e2b23ee55e260cb04eee08f702e0edfec3fb51959",
|
||||
"sha256:240f5c21aef0b73f40bb9f78d2caff73186700bf1bc6b94285699aff98cc16c6",
|
||||
"sha256:26965837447f9c82f1855e0bc8bc4fb910240b6e0d16a664bb722df3b5b06873",
|
||||
"sha256:37340614f8a5d2fb9aeea67fd159bfe4f5f4ed535b1090ce8ec428b2f15a11f2",
|
||||
"sha256:3d10de8116d25649631977cb37da6cbdd2d6fa0e0281d014a5b7d337255ca713",
|
||||
"sha256:3d8427734c781ea5f1b41d6589c293089704d4759e34597dce91014ac125aad1",
|
||||
"sha256:7ec5d3b029f5fa2b179325908b9cd93db28ab7b85bb6c1db56b10e0b54235177",
|
||||
"sha256:8e56e16617872b0957d1c9742a3f94b43533447fd78321514abbe7db216aa250",
|
||||
"sha256:de4e5f7f68220d92b7637fc99847475b59154b7a1b3868fb7385337af54ac9ca",
|
||||
"sha256:eb8cc2afe8b05acbd84a43905832ec78e7b3873fb124ca190f574dca7389a87d",
|
||||
"sha256:ee77aa129f481be46f8d92a1a7db57269a2f23052d5f2433b4621bb457081cc9"
|
||||
],
|
||||
"version": "==3.4.7"
|
||||
},
|
||||
"docutils": {
|
||||
"hashes": [
|
||||
"sha256:686577d2e4c32380bb50cbb22f575ed742d58168cee37e99117a854bcd88f125",
|
||||
"sha256:cf316c8370a737a022b72b56874f6602acf974a37a9fba42ec2876387549fc61"
|
||||
],
|
||||
"version": "==0.17.1"
|
||||
},
|
||||
"h11": {
|
||||
"hashes": [
|
||||
"sha256:36a3cb8c0a032f56e2da7084577878a035d3b61d104230d4bd49c0c6b555a9c6",
|
||||
"sha256:47222cb6067e4a307d535814917cd98fd0a57b6788ce715755fa2b6c28b56042"
|
||||
],
|
||||
"version": "==0.12.0"
|
||||
},
|
||||
"httpcore": {
|
||||
"hashes": [
|
||||
"sha256:37ae835fb370049b2030c3290e12ed298bf1473c41bb72ca4aa78681eba9b7c9",
|
||||
"sha256:93e822cd16c32016b414b789aeff4e855d0ccbfc51df563ee34d4dbadbb3bcdc"
|
||||
],
|
||||
"version": "==0.12.3"
|
||||
},
|
||||
"httpx": {
|
||||
"extras": [
|
||||
"http2"
|
||||
],
|
||||
"hashes": [
|
||||
"sha256:126424c279c842738805974687e0518a94c7ae8d140cd65b9c4f77ac46ffa537",
|
||||
"sha256:9cffb8ba31fac6536f2c8cde30df859013f59e4bcc5b8d43901cb3654a8e0a5b"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==0.16.1"
|
||||
},
|
||||
"idna": {
|
||||
"hashes": [
|
||||
"sha256:14475042e284991034cb48e06f6851428fb14c4dc953acd9be9a5e95c7b6dd7a",
|
||||
"sha256:467fbad99067910785144ce333826c71fb0e63a425657295239737f7ecd125f3"
|
||||
],
|
||||
"version": "==3.2"
|
||||
},
|
||||
"importlib-metadata": {
|
||||
"hashes": [
|
||||
"sha256:960d52ba7c21377c990412aca380bf3642d734c2eaab78a2c39319f67c6a5786",
|
||||
"sha256:e592faad8de1bda9fe920cf41e15261e7131bcf266c30306eec00e8e225c1dd5"
|
||||
],
|
||||
"version": "==4.4.0"
|
||||
},
|
||||
"iniconfig": {
|
||||
"hashes": [
|
||||
"sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3",
|
||||
"sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"
|
||||
],
|
||||
"version": "==1.1.1"
|
||||
},
|
||||
"jeepney": {
|
||||
"hashes": [
|
||||
"sha256:7d59b6622675ca9e993a6bd38de845051d315f8b0c72cca3aef733a20b648657",
|
||||
"sha256:aec56c0eb1691a841795111e184e13cad504f7703b9a64f63020816afa79a8ae"
|
||||
],
|
||||
"markers": "sys_platform == 'linux'",
|
||||
"version": "==0.6.0"
|
||||
},
|
||||
"keyring": {
|
||||
"hashes": [
|
||||
"sha256:045703609dd3fccfcdb27da201684278823b72af515aedec1a8515719a038cb8",
|
||||
"sha256:8f607d7d1cc502c43a932a275a56fe47db50271904513a379d39df1af277ac48"
|
||||
],
|
||||
"version": "==23.0.1"
|
||||
},
|
||||
"packaging": {
|
||||
"hashes": [
|
||||
"sha256:5b327ac1320dc863dca72f4514ecc086f31186744b84a230374cc1fd776feae5",
|
||||
"sha256:67714da7f7bc052e064859c05c595155bd1ee9f69f76557e21f051443c20947a"
|
||||
],
|
||||
"version": "==20.9"
|
||||
},
|
||||
"pkginfo": {
|
||||
"hashes": [
|
||||
"sha256:029a70cb45c6171c329dfc890cde0879f8c52d6f3922794796e06f577bb03db4",
|
||||
"sha256:9fdbea6495622e022cc72c2e5e1b735218e4ffb2a2a69cde2694a6c1f16afb75"
|
||||
],
|
||||
"version": "==1.7.0"
|
||||
},
|
||||
"pluggy": {
|
||||
"hashes": [
|
||||
"sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0",
|
||||
"sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d"
|
||||
],
|
||||
"version": "==0.13.1"
|
||||
},
|
||||
"py": {
|
||||
"hashes": [
|
||||
"sha256:21b81bda15b66ef5e1a777a21c4dcd9c20ad3efd0b3f817e7a809035269e1bd3",
|
||||
"sha256:3b80836aa6d1feeaa108e046da6423ab8f6ceda6468545ae8d02d9d58d18818a"
|
||||
],
|
||||
"version": "==1.10.0"
|
||||
},
|
||||
"pycparser": {
|
||||
"hashes": [
|
||||
"sha256:2d475327684562c3a96cc71adf7dc8c4f0565175cf86b6d7a404ff4c771f15f0",
|
||||
"sha256:7582ad22678f0fcd81102833f60ef8d0e57288b6b5fb00323d101be910e35705"
|
||||
],
|
||||
"version": "==2.20"
|
||||
},
|
||||
"pygments": {
|
||||
"hashes": [
|
||||
"sha256:a18f47b506a429f6f4b9df81bb02beab9ca21d0a5fee38ed15aef65f0545519f",
|
||||
"sha256:d66e804411278594d764fc69ec36ec13d9ae9147193a1740cd34d272ca383b8e"
|
||||
],
|
||||
"version": "==2.9.0"
|
||||
},
|
||||
"pyparsing": {
|
||||
"hashes": [
|
||||
"sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1",
|
||||
"sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b"
|
||||
],
|
||||
"version": "==2.4.7"
|
||||
},
|
||||
"pytest": {
|
||||
"hashes": [
|
||||
"sha256:50bcad0a0b9c5a72c8e4e7c9855a3ad496ca6a881a3641b4260605450772c54b",
|
||||
"sha256:91ef2131a9bd6be8f76f1f08eac5c5317221d6ad1e143ae03894b862e8976890"
|
||||
],
|
||||
"version": "==6.2.4"
|
||||
},
|
||||
"pytest-httpx": {
|
||||
"hashes": [
|
||||
"sha256:0a7c56e559b23efbf857054cd74de60a7c540694a162423f89c70da6ad358d8e",
|
||||
"sha256:d32e8f6fb7e028f0313f5f5a2d463c8673eb43fd11a9bfe8527299717a7764c4"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==0.10.1"
|
||||
},
|
||||
"pytest-mock": {
|
||||
"hashes": [
|
||||
"sha256:024e405ad382646318c4281948aadf6fe1135632bea9cc67366ea0c4098ef5f2",
|
||||
"sha256:a4d6d37329e4a893e77d9ffa89e838dd2b45d5dc099984cf03c703ac8411bb82"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==3.3.1"
|
||||
},
|
||||
"readme-renderer": {
|
||||
"hashes": [
|
||||
"sha256:63b4075c6698fcfa78e584930f07f39e05d46f3ec97f65006e430b595ca6348c",
|
||||
"sha256:92fd5ac2bf8677f310f3303aa4bce5b9d5f9f2094ab98c29f13791d7b805a3db"
|
||||
],
|
||||
"version": "==29.0"
|
||||
},
|
||||
"requests": {
|
||||
"hashes": [
|
||||
"sha256:27973dd4a904a4f13b263a19c866c13b92a39ed1c964655f025f3f8d3d75b804",
|
||||
"sha256:c210084e36a42ae6b9219e00e48287def368a26d03a048ddad7bfee44f75871e"
|
||||
],
|
||||
"version": "==2.25.1"
|
||||
},
|
||||
"requests-toolbelt": {
|
||||
"hashes": [
|
||||
"sha256:380606e1d10dc85c3bd47bf5a6095f815ec007be7a8b69c878507068df059e6f",
|
||||
"sha256:968089d4584ad4ad7c171454f0a5c6dac23971e9472521ea3b6d49d610aa6fc0"
|
||||
],
|
||||
"version": "==0.9.1"
|
||||
},
|
||||
"rfc3986": {
|
||||
"extras": [
|
||||
"idna2008"
|
||||
],
|
||||
"hashes": [
|
||||
"sha256:270aaf10d87d0d4e095063c65bf3ddbc6ee3d0b226328ce21e036f946e421835",
|
||||
"sha256:a86d6e1f5b1dc238b218b012df0aa79409667bb209e58da56d0b94704e712a97"
|
||||
],
|
||||
"version": "==1.5.0"
|
||||
},
|
||||
"secretstorage": {
|
||||
"hashes": [
|
||||
"sha256:422d82c36172d88d6a0ed5afdec956514b189ddbfb72fefab0c8a1cee4eaf71f",
|
||||
"sha256:fd666c51a6bf200643495a04abb261f83229dcb6fd8472ec393df7ffc8b6f195"
|
||||
],
|
||||
"markers": "sys_platform == 'linux'",
|
||||
"version": "==3.3.1"
|
||||
},
|
||||
"six": {
|
||||
"hashes": [
|
||||
"sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926",
|
||||
"sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"
|
||||
],
|
||||
"version": "==1.16.0"
|
||||
},
|
||||
"sniffio": {
|
||||
"hashes": [
|
||||
"sha256:471b71698eac1c2112a40ce2752bb2f4a4814c22a54a3eed3676bc0f5ca9f663",
|
||||
"sha256:c4666eecec1d3f50960c6bdf61ab7bc350648da6c126e3cf6898d8cd4ddcd3de"
|
||||
],
|
||||
"version": "==1.2.0"
|
||||
},
|
||||
"toml": {
|
||||
"hashes": [
|
||||
"sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b",
|
||||
"sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"
|
||||
],
|
||||
"version": "==0.10.2"
|
||||
},
|
||||
"tqdm": {
|
||||
"hashes": [
|
||||
"sha256:736524215c690621b06fc89d0310a49822d75e599fcd0feb7cc742b98d692493",
|
||||
"sha256:cd5791b5d7c3f2f1819efc81d36eb719a38e0906a7380365c556779f585ea042"
|
||||
],
|
||||
"version": "==4.61.0"
|
||||
},
|
||||
"twine": {
|
||||
"hashes": [
|
||||
"sha256:34352fd52ec3b9d29837e6072d5a2a7c6fe4290e97bba46bb8d478b5c598f7ab",
|
||||
"sha256:ba9ff477b8d6de0c89dd450e70b2185da190514e91c42cc62f96850025c10472"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==3.2.0"
|
||||
},
|
||||
"urllib3": {
|
||||
"hashes": [
|
||||
"sha256:753a0374df26658f99d826cfe40394a686d05985786d946fbe4165b5148f5a7c",
|
||||
"sha256:a7acd0977125325f516bda9735fa7142b909a8d01e8b2e4c8108d0984e6e0098"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==1.26.5"
|
||||
},
|
||||
"webencodings": {
|
||||
"hashes": [
|
||||
"sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78",
|
||||
"sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923"
|
||||
],
|
||||
"version": "==0.5.1"
|
||||
},
|
||||
"wheel": {
|
||||
"hashes": [
|
||||
"sha256:906864fb722c0ab5f2f9c35b2c65e3af3c009402c108a709c0aca27bc2c9187b",
|
||||
"sha256:aaef9b8c36db72f8bf7f1e54f85f875c4d466819940863ca0b3f3f77f0a1646f"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==0.36.1"
|
||||
},
|
||||
"zipp": {
|
||||
"hashes": [
|
||||
"sha256:3607921face881ba3e026887d8150cca609d517579abe052ac81fc5aeffdbd76",
|
||||
"sha256:51cb66cc54621609dd593d1787f286ee42a5c0adbb4b29abea5a63edc3e03098"
|
||||
],
|
||||
"version": "==3.4.1"
|
||||
}
|
||||
}
|
||||
}
|
||||
170
README.md
170
README.md
@@ -3,20 +3,20 @@ pytchat
|
||||
|
||||
pytchat is a python library for fetching youtube live chat.
|
||||
|
||||
|
||||
<br><br><br>
|
||||
## Description
|
||||
pytchat is a python library for fetching youtube live chat
|
||||
without using youtube api, Selenium or BeautifulSoup.
|
||||
|
||||
pytchatはAPIを使わずにYouTubeチャットを取得するための軽量pythonライブラリです。
|
||||
without using Selenium or BeautifulSoup.
|
||||
|
||||
Other features:
|
||||
+ Customizable chat data processors including youtube api compatible one.
|
||||
+ Customizable [chat data processors](https://github.com/taizan-hokuto/pytchat/wiki/ChatProcessor) including youtube api compatible one.
|
||||
+ Available on asyncio context.
|
||||
+ Quick fetching of initial chat data by generating continuation params
|
||||
instead of web scraping.
|
||||
|
||||
For more detailed information, see [wiki](https://github.com/taizan-hokuto/pytchat/wiki). <br>
|
||||
より詳細な解説は[wiki](https://github.com/taizan-hokuto/pytchat/wiki/Home_jp)を参照してください。
|
||||
[wiki (Japanese)](https://github.com/taizan-hokuto/pytchat/wiki/Home_jp)
|
||||
|
||||
## Install
|
||||
```python
|
||||
@@ -24,133 +24,44 @@ pip install pytchat
|
||||
```
|
||||
## Examples
|
||||
|
||||
### CLI
|
||||
|
||||
One-liner command.
|
||||
Save chat data to html.
|
||||
|
||||
```bash
|
||||
$ pytchat -v ZJ6Q4U_Vg6s -o "c:/temp/"
|
||||
|
||||
# options:
|
||||
# -v : video_id
|
||||
# -o : output directory (default path: './')
|
||||
# saved filename is [video_id].html
|
||||
```
|
||||
|
||||
|
||||
### on-demand mode
|
||||
### Fetch chat data (see [wiki](https://github.com/taizan-hokuto/pytchat/wiki/PytchatCore))
|
||||
```python
|
||||
from pytchat import LiveChat
|
||||
livechat = LiveChat(video_id = "Zvp1pJpie4I")
|
||||
|
||||
while livechat.is_alive():
|
||||
try:
|
||||
chatdata = livechat.get()
|
||||
for c in chatdata.items:
|
||||
print(f"{c.datetime} [{c.author.name}]- {c.message}")
|
||||
chatdata.tick()
|
||||
except KeyboardInterrupt:
|
||||
livechat.terminate()
|
||||
break
|
||||
```
|
||||
|
||||
### callback mode
|
||||
```python
|
||||
from pytchat import LiveChat
|
||||
import time
|
||||
|
||||
def main():
|
||||
livechat = LiveChat(video_id = "Zvp1pJpie4I", callback = disp)
|
||||
while livechat.is_alive():
|
||||
#other background operation.
|
||||
time.sleep(1)
|
||||
livechat.terminate()
|
||||
|
||||
#callback function (automatically called)
|
||||
def disp(chatdata):
|
||||
for c in chatdata.items:
|
||||
print(f"{c.datetime} [{c.author.name}]- {c.message}")
|
||||
chatdata.tick()
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
|
||||
```
|
||||
|
||||
### asyncio context:
|
||||
```python
|
||||
from pytchat import LiveChatAsync
|
||||
from concurrent.futures import CancelledError
|
||||
import asyncio
|
||||
|
||||
async def main():
|
||||
livechat = LiveChatAsync("Zvp1pJpie4I", callback = func)
|
||||
while livechat.is_alive():
|
||||
#other background operation.
|
||||
await asyncio.sleep(3)
|
||||
|
||||
#callback function is automatically called.
|
||||
async def func(chatdata):
|
||||
for c in chatdata.items:
|
||||
print(f"{c.datetime} [{c.author.name}]-{c.message} {c.amountString}")
|
||||
await chatdata.tick_async()
|
||||
|
||||
if __name__ == '__main__':
|
||||
try:
|
||||
loop = asyncio.get_event_loop()
|
||||
loop.run_until_complete(main())
|
||||
except CancelledError:
|
||||
pass
|
||||
```
|
||||
|
||||
|
||||
### youtube api compatible processor:
|
||||
```python
|
||||
from pytchat import LiveChat, CompatibleProcessor
|
||||
import time
|
||||
|
||||
chat = LiveChat("Zvp1pJpie4I",
|
||||
processor = CompatibleProcessor() )
|
||||
|
||||
import pytchat
|
||||
chat = pytchat.create(video_id="uIx8l2xlYVY")
|
||||
while chat.is_alive():
|
||||
try:
|
||||
data = chat.get()
|
||||
polling = data['pollingIntervalMillis']/1000
|
||||
for c in data['items']:
|
||||
if c.get('snippet'):
|
||||
print(f"[{c['authorDetails']['displayName']}]"
|
||||
f"-{c['snippet']['displayMessage']}")
|
||||
time.sleep(polling/len(data['items']))
|
||||
except KeyboardInterrupt:
|
||||
chat.terminate()
|
||||
for c in chat.get().sync_items():
|
||||
print(f"{c.datetime} [{c.author.name}]- {c.message}")
|
||||
```
|
||||
### replay:
|
||||
If specified video is not live,
|
||||
automatically try to fetch archived chat data.
|
||||
|
||||
|
||||
### Output JSON format string (feature of [DefaultProcessor](https://github.com/taizan-hokuto/pytchat/wiki/DefaultProcessor))
|
||||
```python
|
||||
from pytchat import LiveChat
|
||||
import pytchat
|
||||
import time
|
||||
|
||||
def main():
|
||||
#seektime (seconds): start position of chat.
|
||||
chat = LiveChat("ojes5ULOqhc", seektime = 60*30)
|
||||
print('Replay from 30:00')
|
||||
try:
|
||||
while chat.is_alive():
|
||||
data = chat.get()
|
||||
for c in data.items:
|
||||
print(f"{c.elapsedTime} [{c.author.name}]-{c.message} {c.amountString}")
|
||||
data.tick()
|
||||
except KeyboardInterrupt:
|
||||
chat.terminate()
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
chat = pytchat.create(video_id="uIx8l2xlYVY")
|
||||
while chat.is_alive():
|
||||
print(chat.get().json())
|
||||
time.sleep(5)
|
||||
'''
|
||||
# Each chat item can also be output in JSON format.
|
||||
for c in chat.get().items:
|
||||
print(c.json())
|
||||
'''
|
||||
```
|
||||
|
||||
|
||||
### other
|
||||
+ Fetch chat with a buffer ([LiveChat](https://github.com/taizan-hokuto/pytchat/wiki/LiveChat))
|
||||
|
||||
+ Use with asyncio ([LiveChatAsync](https://github.com/taizan-hokuto/pytchat/wiki/LiveChatAsync))
|
||||
|
||||
+ YT API compatible chat processor ([CompatibleProcessor](https://github.com/taizan-hokuto/pytchat/wiki/CompatibleProcessor))
|
||||
|
||||
|
||||
## Structure of Default Processor
|
||||
Each item can be got with `items` function.
|
||||
Each item can be got with `sync_items()` function.
|
||||
<table>
|
||||
<tr>
|
||||
<th>name</th>
|
||||
@@ -175,7 +86,7 @@ Each item can be got with `items` function.
|
||||
<tr>
|
||||
<td>messageEx</td>
|
||||
<td>str</td>
|
||||
<td>list of message texts and emoji URLs.</td>
|
||||
<td>list of message texts and emoji dicts(id, txt, url).</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>timestamp</td>
|
||||
@@ -277,16 +188,3 @@ Structure of author object.
|
||||
[](LICENSE)
|
||||
|
||||
|
||||
## Contributes
|
||||
Great thanks:
|
||||
|
||||
Most of source code of CLI refer to:
|
||||
|
||||
[PetterKraabol / Twitch-Chat-Downloader](https://github.com/PetterKraabol/Twitch-Chat-Downloader)
|
||||
|
||||
|
||||
## Author
|
||||
|
||||
[taizan-hokuto](https://github.com/taizan-hokuto)
|
||||
|
||||
[twitter:@taizan205](https://twitter.com/taizan205)
|
||||
|
||||
@@ -1,17 +1,31 @@
|
||||
"""
|
||||
pytchat is a python library for fetching youtube live chat without using yt api, Selenium, or BeautifulSoup.
|
||||
pytchat is a lightweight python library to browse youtube livechat without Selenium or BeautifulSoup.
|
||||
"""
|
||||
__copyright__ = 'Copyright (C) 2019 taizan-hokuto'
|
||||
__version__ = '0.0.6.4'
|
||||
__copyright__ = 'Copyright (C) 2019, 2020 taizan-hokuto'
|
||||
__version__ = '0.5.4'
|
||||
__license__ = 'MIT'
|
||||
__author__ = 'taizan-hokuto'
|
||||
__author_email__ = '55448286+taizan-hokuto@users.noreply.github.com'
|
||||
__url__ = 'https://github.com/taizan-hokuto/pytchat'
|
||||
|
||||
__all__ = ["core_async","core_multithread","processors"]
|
||||
|
||||
from .exceptions import (
|
||||
ChatParseException,
|
||||
ResponseContextError,
|
||||
NoContents,
|
||||
NoContinuation,
|
||||
IllegalFunctionCall,
|
||||
InvalidVideoIdException,
|
||||
UnknownConnectionError,
|
||||
RetryExceedMaxCount,
|
||||
ChatDataFinished,
|
||||
ReceivedUnknownContinuation,
|
||||
FailedExtractContinuation,
|
||||
VideoInfoParseError,
|
||||
PatternUnmatchError
|
||||
)
|
||||
|
||||
from .api import (
|
||||
cli,
|
||||
config,
|
||||
LiveChat,
|
||||
LiveChatAsync,
|
||||
@@ -19,12 +33,12 @@ from .api import (
|
||||
CompatibleProcessor,
|
||||
DummyProcessor,
|
||||
DefaultProcessor,
|
||||
Extractor,
|
||||
HTMLArchiver,
|
||||
TSVArchiver,
|
||||
JsonfileArchiver,
|
||||
SimpleDisplayProcessor,
|
||||
SpeedCalculator,
|
||||
SuperchatCalculator,
|
||||
VideoInfo
|
||||
create
|
||||
)
|
||||
# flake8: noqa
|
||||
@@ -1,5 +1,5 @@
|
||||
from . import cli
|
||||
from . import config
|
||||
from .core import create
|
||||
from .core_multithread.livechat import LiveChat
|
||||
from .core_async.livechat import LiveChatAsync
|
||||
from .processors.chat_processor import ChatProcessor
|
||||
@@ -12,5 +12,23 @@ from .processors.jsonfile_archiver import JsonfileArchiver
|
||||
from .processors.simple_display_processor import SimpleDisplayProcessor
|
||||
from .processors.speed.calculator import SpeedCalculator
|
||||
from .processors.superchat.calculator import SuperchatCalculator
|
||||
from .tool.extract.extractor import Extractor
|
||||
from .tool.videoinfo import VideoInfo
|
||||
|
||||
|
||||
__all__ = [
|
||||
config,
|
||||
LiveChat,
|
||||
LiveChatAsync,
|
||||
ChatProcessor,
|
||||
CompatibleProcessor,
|
||||
DummyProcessor,
|
||||
DefaultProcessor,
|
||||
HTMLArchiver,
|
||||
TSVArchiver,
|
||||
JsonfileArchiver,
|
||||
SimpleDisplayProcessor,
|
||||
SpeedCalculator,
|
||||
SuperchatCalculator,
|
||||
create
|
||||
]
|
||||
|
||||
# flake8: noqa
|
||||
@@ -1,51 +0,0 @@
|
||||
import argparse
|
||||
import os
|
||||
from pathlib import Path
|
||||
from typing import List, Callable
|
||||
from .arguments import Arguments
|
||||
|
||||
from .. exceptions import InvalidVideoIdException, NoContentsException
|
||||
from .. processors.tsv_archiver import TSVArchiver
|
||||
from .. processors.html_archiver import HTMLArchiver
|
||||
from .. tool.extract.extractor import Extractor
|
||||
from .. tool.videoinfo import VideoInfo
|
||||
from .. import __version__
|
||||
|
||||
'''
|
||||
Most of CLI modules refer to
|
||||
Petter Kraabøl's Twitch-Chat-Downloader
|
||||
https://github.com/PetterKraabol/Twitch-Chat-Downloader
|
||||
(MIT License)
|
||||
|
||||
'''
|
||||
def main():
|
||||
# Arguments
|
||||
parser = argparse.ArgumentParser(description=f'pytchat v{__version__}')
|
||||
parser.add_argument('-v', f'--{Arguments.Name.VIDEO}', type=str,
|
||||
help='Video IDs separated by commas without space')
|
||||
parser.add_argument('-o', f'--{Arguments.Name.OUTPUT}', type=str,
|
||||
help='Output directory (end with "/")', default='./')
|
||||
parser.add_argument(f'--{Arguments.Name.VERSION}', action='store_true',
|
||||
help='Settings version')
|
||||
Arguments(parser.parse_args().__dict__)
|
||||
if Arguments().print_version:
|
||||
print(f'pytchat v{__version__}')
|
||||
return
|
||||
|
||||
# Extractor
|
||||
if Arguments().video_ids:
|
||||
for video_id in Arguments().video_ids:
|
||||
try:
|
||||
info = VideoInfo(video_id)
|
||||
print(f"Extracting...\n"
|
||||
f" video_id: {video_id}\n"
|
||||
f" channel: {info.get_channel_name()}\n"
|
||||
f" title: {info.get_title()}")
|
||||
Extractor(video_id,
|
||||
processor = HTMLArchiver(Arguments().output+video_id+'.html')
|
||||
).extract()
|
||||
print("Extraction end.\n")
|
||||
except (InvalidVideoIdException, NoContentsException) as e:
|
||||
print(e)
|
||||
return
|
||||
parser.print_help()
|
||||
@@ -1,39 +0,0 @@
|
||||
from typing import Optional, Dict, Union, List
|
||||
from .singleton import Singleton
|
||||
|
||||
'''
|
||||
This modules refer to
|
||||
Petter Kraabøl's Twitch-Chat-Downloader
|
||||
https://github.com/PetterKraabol/Twitch-Chat-Downloader
|
||||
(MIT License)
|
||||
'''
|
||||
|
||||
class Arguments(metaclass=Singleton):
|
||||
"""
|
||||
Arguments singleton
|
||||
"""
|
||||
|
||||
class Name:
|
||||
VERSION: str = 'version'
|
||||
OUTPUT: str = 'output'
|
||||
VIDEO: str = 'video'
|
||||
|
||||
def __init__(self,
|
||||
arguments: Optional[Dict[str, Union[str, bool, int]]] = None):
|
||||
"""
|
||||
Initialize arguments
|
||||
:param arguments: Arguments from cli
|
||||
(Optional to call singleton instance without parameters)
|
||||
"""
|
||||
|
||||
if arguments is None:
|
||||
print('Error: arguments were not provided')
|
||||
exit()
|
||||
|
||||
self.print_version: bool = arguments[Arguments.Name.VERSION]
|
||||
self.output: str = arguments[Arguments.Name.OUTPUT]
|
||||
self.video_ids: List[int] = []
|
||||
# Videos
|
||||
if arguments[Arguments.Name.VIDEO]:
|
||||
self.video_ids = [video_id
|
||||
for video_id in arguments[Arguments.Name.VIDEO].split(',')]
|
||||
@@ -1,19 +0,0 @@
|
||||
'''
|
||||
This modules refer to
|
||||
Petter Kraabøl's Twitch-Chat-Downloader
|
||||
https://github.com/PetterKraabol/Twitch-Chat-Downloader
|
||||
(MIT License)
|
||||
'''
|
||||
class Singleton(type):
|
||||
"""
|
||||
Abstract class for singletons
|
||||
"""
|
||||
_instances = {}
|
||||
|
||||
def __call__(cls, *args, **kwargs):
|
||||
if cls not in cls._instances:
|
||||
cls._instances[cls] = super().__call__(*args, **kwargs)
|
||||
return cls._instances[cls]
|
||||
|
||||
def get_instance(cls, *args, **kwargs):
|
||||
cls.__call__(*args, **kwargs)
|
||||
@@ -1,11 +1,16 @@
|
||||
import logging
|
||||
import logging # noqa
|
||||
from . import mylogger
|
||||
|
||||
from base64 import a85decode as dc
|
||||
headers = {
|
||||
'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/77.0.3865.120 Safari/537.36'}
|
||||
'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.183 Safari/537.36 Edg/86.0.622.63,gzip(gfe)',
|
||||
}
|
||||
m_headers = {
|
||||
'user-agent': 'Mozilla/5.0 (Linux; Android 6.0; Nexus 5 Build/MRA58N) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.114 Mobile Safari/537.36 Edg/91.0.864.59',
|
||||
}
|
||||
_sml = dc(b"BQS?8F#ks-GB\\6`H#IhIF^eo7@rH3;H#IhIF^eor06T''Ch\\'(?XmbXF>%9<FC/iuG%G#jBOQ!ICLqcS5tQB2;gCZ)?UdXC;f$GR3)MM2<(0>O7mh!,G@+K5?SO9T@okV").decode()
|
||||
_smr = dc(b"BQS?8F#ks-GB\\6`H#IhIF^eo7@rH3;H#IhIF^eor06T''Ch\\'(?XmbXF>%9<FC/iuG%G#jBOQ!iEb03+@<k(QAU-F)8U=fDGsP557S5F7CiNH7;)D3N77^*B6YU@\\?WfBr0emZX=#^").decode()
|
||||
|
||||
def logger(module_name: str, loglevel = None):
|
||||
module_logger = mylogger.get_logger(module_name, loglevel = loglevel)
|
||||
|
||||
def logger(module_name: str, loglevel=None):
|
||||
module_logger = mylogger.get_logger(module_name, loglevel=loglevel)
|
||||
return module_logger
|
||||
|
||||
|
||||
|
||||
@@ -1,31 +1,31 @@
|
||||
from logging import NullHandler, getLogger, StreamHandler, FileHandler, Formatter
|
||||
from logging import NullHandler, getLogger, StreamHandler, FileHandler
|
||||
import logging
|
||||
from datetime import datetime
|
||||
|
||||
|
||||
def get_logger(modname,loglevel=logging.DEBUG):
|
||||
def get_logger(modname, loglevel=logging.DEBUG):
|
||||
logger = getLogger(modname)
|
||||
if loglevel == None:
|
||||
if loglevel is None:
|
||||
logger.addHandler(NullHandler())
|
||||
return logger
|
||||
logger.setLevel(loglevel)
|
||||
#create handler1 for showing info
|
||||
# create handler1 for showing info
|
||||
handler1 = StreamHandler()
|
||||
my_formatter = MyFormatter()
|
||||
handler1.setFormatter(my_formatter)
|
||||
|
||||
handler1.setLevel(loglevel)
|
||||
logger.addHandler(handler1)
|
||||
#create handler2 for recording log file
|
||||
# create handler2 for recording log file
|
||||
if loglevel <= logging.DEBUG:
|
||||
handler2 = FileHandler(filename="log.txt", encoding='utf-8')
|
||||
handler2.setLevel(logging.ERROR)
|
||||
handler2.setFormatter(my_formatter)
|
||||
|
||||
|
||||
logger.addHandler(handler2)
|
||||
return logger
|
||||
|
||||
|
||||
class MyFormatter(logging.Formatter):
|
||||
def format(self, record):
|
||||
timestamp = (
|
||||
@@ -35,4 +35,4 @@ class MyFormatter(logging.Formatter):
|
||||
lineno = str(record.lineno).rjust(4)
|
||||
message = record.getMessage()
|
||||
|
||||
return timestamp+'| '+module+' { '+funcname+':'+lineno+'} - '+message
|
||||
return timestamp + '| ' + module + ' { ' + funcname + ':' + lineno + '} - ' + message
|
||||
|
||||
7
pytchat/core/__init__.py
Normal file
7
pytchat/core/__init__.py
Normal file
@@ -0,0 +1,7 @@
|
||||
from .pytchat import PytchatCore
|
||||
from .. util import extract_video_id
|
||||
|
||||
|
||||
def create(video_id: str, **kwargs):
|
||||
_vid = extract_video_id(video_id)
|
||||
return PytchatCore(_vid, **kwargs)
|
||||
216
pytchat/core/pytchat.py
Normal file
216
pytchat/core/pytchat.py
Normal file
@@ -0,0 +1,216 @@
|
||||
import httpx
|
||||
import json
|
||||
import signal
|
||||
import time
|
||||
import traceback
|
||||
from ..parser.live import Parser
|
||||
from .. import config
|
||||
from .. import exceptions
|
||||
from ..paramgen import liveparam, arcparam
|
||||
from ..processors.default.processor import DefaultProcessor
|
||||
from ..processors.combinator import Combinator
|
||||
from .. import util
|
||||
|
||||
headers = config.headers
|
||||
MAX_RETRY = 10
|
||||
|
||||
class PytchatCore:
|
||||
'''
|
||||
|
||||
Parameter
|
||||
---------
|
||||
video_id : str
|
||||
|
||||
seektime : int
|
||||
start position of fetching chat (seconds).
|
||||
This option is valid for archived chat only.
|
||||
If negative value, chat data posted before the start of the broadcast
|
||||
will be retrieved as well.
|
||||
|
||||
processor : ChatProcessor
|
||||
|
||||
interruptable : bool
|
||||
Allows keyboard interrupts.
|
||||
Set this parameter to False if your own threading program causes
|
||||
the problem.
|
||||
|
||||
force_replay : bool
|
||||
force to fetch archived chat data, even if specified video is live.
|
||||
|
||||
topchat_only : bool
|
||||
If True, get only top chat.
|
||||
|
||||
hold_exception : bool [default:True]
|
||||
If True, when exceptions occur, the exception is held internally,
|
||||
and can be raised by raise_for_status().
|
||||
|
||||
replay_continuation : str
|
||||
If this parameter is not None, the processor will attempt to get chat data from continuation.
|
||||
This parameter is only allowed in archived mode.
|
||||
|
||||
Attributes
|
||||
---------
|
||||
_is_alive : bool
|
||||
Flag to stop getting chat.
|
||||
'''
|
||||
|
||||
def __init__(self, video_id,
|
||||
seektime=-1,
|
||||
processor=DefaultProcessor(),
|
||||
interruptable=True,
|
||||
force_replay=False,
|
||||
topchat_only=False,
|
||||
hold_exception=True,
|
||||
logger=config.logger(__name__),
|
||||
replay_continuation=None
|
||||
):
|
||||
self._video_id = util.extract_video_id(video_id)
|
||||
self.seektime = seektime
|
||||
if isinstance(processor, tuple):
|
||||
self.processor = Combinator(processor)
|
||||
else:
|
||||
self.processor = processor
|
||||
self._is_alive = True
|
||||
self._is_replay = force_replay or (replay_continuation is not None)
|
||||
self._hold_exception = hold_exception
|
||||
self._exception_holder = None
|
||||
self._parser = Parser(
|
||||
is_replay=self._is_replay,
|
||||
exception_holder=self._exception_holder
|
||||
)
|
||||
self._first_fetch = replay_continuation is None
|
||||
self._fetch_url = config._sml if replay_continuation is None else config._smr
|
||||
self._topchat_only = topchat_only
|
||||
self._dat = ''
|
||||
self._last_offset_ms = 0
|
||||
self._logger = logger
|
||||
self.continuation = replay_continuation
|
||||
if interruptable:
|
||||
signal.signal(signal.SIGINT, lambda a, b: self.terminate())
|
||||
self._setup()
|
||||
|
||||
def _setup(self):
|
||||
if not self.continuation:
|
||||
time.sleep(0.1) # sleep shortly to prohibit skipping fetching data
|
||||
"""Fetch first continuation parameter,
|
||||
create and start _listen loop.
|
||||
"""
|
||||
self.continuation = liveparam.getparam(
|
||||
self._video_id,
|
||||
channel_id=util.get_channelid(httpx.Client(http2=True), self._video_id),
|
||||
past_sec=3)
|
||||
|
||||
def _get_chat_component(self):
|
||||
''' Fetch chat data and store them into buffer,
|
||||
get next continuaiton parameter and loop.
|
||||
|
||||
Parameter
|
||||
---------
|
||||
continuation : str
|
||||
parameter for next chat data
|
||||
'''
|
||||
try:
|
||||
with httpx.Client(http2=True) as client:
|
||||
if self.continuation and self._is_alive:
|
||||
contents = self._get_contents(self.continuation, client, headers)
|
||||
metadata, chatdata = self._parser.parse(contents)
|
||||
timeout = metadata['timeoutMs'] / 1000
|
||||
chat_component = {
|
||||
"video_id": self._video_id,
|
||||
"timeout": timeout,
|
||||
"chatdata": chatdata
|
||||
}
|
||||
self.continuation = metadata.get('continuation')
|
||||
self._last_offset_ms = metadata.get('last_offset_ms', 0)
|
||||
return chat_component
|
||||
except exceptions.ChatParseException as e:
|
||||
self._logger.debug(f"[{self._video_id}]{str(e)}")
|
||||
self._raise_exception(e)
|
||||
except Exception as e:
|
||||
self._logger.error(f"{traceback.format_exc(limit=-1)}")
|
||||
self._raise_exception(e)
|
||||
|
||||
def _get_contents(self, continuation, client, headers):
|
||||
'''Get 'continuationContents' from livechat json.
|
||||
If contents is None at first fetching,
|
||||
try to fetch archive chat data.
|
||||
|
||||
Return:
|
||||
-------
|
||||
'continuationContents' which includes metadata & chat data.
|
||||
'''
|
||||
livechat_json = (
|
||||
self._get_livechat_json(continuation, client, replay=self._is_replay, offset_ms=self._last_offset_ms)
|
||||
)
|
||||
contents, dat = self._parser.get_contents(livechat_json)
|
||||
if self._dat == '' and dat:
|
||||
self._dat = dat
|
||||
if self._first_fetch:
|
||||
if contents is None or self._is_replay:
|
||||
'''Try to fetch archive chat data.'''
|
||||
self._parser.is_replay = True
|
||||
self._fetch_url = config._smr
|
||||
continuation = arcparam.getparam(
|
||||
self._video_id, self.seektime, self._topchat_only, util.get_channelid(client, self._video_id))
|
||||
livechat_json = self._get_livechat_json(continuation, client, replay=True, offset_ms=self.seektime * 1000)
|
||||
reload_continuation = self._parser.reload_continuation(
|
||||
self._parser.get_contents(livechat_json)[0])
|
||||
if reload_continuation:
|
||||
livechat_json = (self._get_livechat_json(
|
||||
reload_continuation, client, headers))
|
||||
contents, _ = self._parser.get_contents(livechat_json)
|
||||
self._is_replay = True
|
||||
self._first_fetch = False
|
||||
return contents
|
||||
|
||||
def _get_livechat_json(self, continuation, client, replay: bool, offset_ms: int = 0):
|
||||
'''
|
||||
Get json which includes chat data.
|
||||
'''
|
||||
livechat_json = None
|
||||
err = None
|
||||
if offset_ms < 0:
|
||||
offset_ms = 0
|
||||
param = util.get_param(continuation, dat=self._dat, replay=replay, offsetms=offset_ms)
|
||||
for _ in range(MAX_RETRY + 1):
|
||||
with httpx.Client(http2=True) as client:
|
||||
try:
|
||||
response = client.post(self._fetch_url, json=param)
|
||||
livechat_json = json.loads(response.text)
|
||||
break
|
||||
except (json.JSONDecodeError, httpx.ConnectTimeout, httpx.ReadTimeout, httpx.ConnectError) as e:
|
||||
err = e
|
||||
time.sleep(2)
|
||||
continue
|
||||
else:
|
||||
self._logger.error(f"[{self._video_id}]"
|
||||
f"Exceeded retry count. Last error: {str(err)}")
|
||||
self._raise_exception(exceptions.RetryExceedMaxCount())
|
||||
return livechat_json
|
||||
|
||||
def get(self):
|
||||
if self.is_alive():
|
||||
chat_component = self._get_chat_component()
|
||||
return self.processor.process([chat_component])
|
||||
else:
|
||||
return []
|
||||
|
||||
def is_replay(self):
|
||||
return self._is_replay
|
||||
|
||||
def is_alive(self):
|
||||
return self._is_alive
|
||||
|
||||
def terminate(self):
|
||||
self._is_alive = False
|
||||
self.processor.finalize()
|
||||
|
||||
def raise_for_status(self):
|
||||
if self._exception_holder is not None:
|
||||
raise self._exception_holder
|
||||
|
||||
def _raise_exception(self, exception: Exception = None):
|
||||
self.terminate()
|
||||
if self._hold_exception is False:
|
||||
raise exception
|
||||
self._exception_holder = exception
|
||||
@@ -1,26 +1,29 @@
|
||||
|
||||
import asyncio
|
||||
|
||||
|
||||
class Buffer(asyncio.Queue):
|
||||
'''
|
||||
チャットデータを格納するバッファの役割を持つFIFOキュー
|
||||
Buffer for storing chat data.
|
||||
|
||||
Parameter
|
||||
---------
|
||||
maxsize : int
|
||||
格納するチャットブロックの最大個数。0の場合は無限。
|
||||
最大値を超える場合は古いチャットブロックから破棄される。
|
||||
Maximum number of chat blocks to be stored.
|
||||
If it exceeds the maximum, the oldest chat block will be discarded.
|
||||
'''
|
||||
def __init__(self,maxsize = 0):
|
||||
|
||||
def __init__(self, maxsize=0):
|
||||
super().__init__(maxsize)
|
||||
|
||||
async def put(self,item):
|
||||
async def put(self, item):
|
||||
if item is None:
|
||||
return
|
||||
if super().full():
|
||||
super().get_nowait()
|
||||
await super().put(item)
|
||||
|
||||
def put_nowait(self,item):
|
||||
def put_nowait(self, item):
|
||||
if item is None:
|
||||
return
|
||||
if super().full():
|
||||
|
||||
@@ -1,18 +1,17 @@
|
||||
import aiohttp, asyncio
|
||||
import datetime
|
||||
|
||||
import asyncio
|
||||
import httpx
|
||||
import json
|
||||
import random
|
||||
import signal
|
||||
import time
|
||||
import traceback
|
||||
import urllib.parse
|
||||
from aiohttp.client_exceptions import ClientConnectorError
|
||||
from concurrent.futures import CancelledError
|
||||
from asyncio import Queue
|
||||
from concurrent.futures import CancelledError
|
||||
from .buffer import Buffer
|
||||
from ..parser.live import Parser
|
||||
from .. import config
|
||||
from ..exceptions import ChatParseException,IllegalFunctionCall
|
||||
from .. import exceptions
|
||||
from .. import util
|
||||
from ..paramgen import liveparam, arcparam
|
||||
from ..processors.default.processor import DefaultProcessor
|
||||
from ..processors.combinator import Combinator
|
||||
@@ -22,72 +21,74 @@ MAX_RETRY = 10
|
||||
|
||||
|
||||
class LiveChatAsync:
|
||||
'''asyncio(aiohttp)を利用してYouTubeのライブ配信のチャットデータを取得する。
|
||||
'''LiveChatAsync object fetches chat data and stores them
|
||||
in a buffer with asyncio.
|
||||
|
||||
Parameter
|
||||
---------
|
||||
video_id : str
|
||||
動画ID
|
||||
|
||||
seektime : int
|
||||
(ライブチャット取得時は無視)
|
||||
取得開始するアーカイブ済みチャットの経過時間(秒)
|
||||
マイナス値を指定した場合は、配信開始前のチャットも取得する。
|
||||
start position of fetching chat (seconds).
|
||||
This option is valid for archived chat only.
|
||||
If negative value, chat data posted before the start of the broadcast
|
||||
will be retrieved as well.
|
||||
|
||||
processor : ChatProcessor
|
||||
チャットデータを加工するオブジェクト
|
||||
|
||||
buffer : Buffer(maxsize:20[default])
|
||||
チャットデータchat_componentを格納するバッファ。
|
||||
maxsize : 格納できるchat_componentの個数
|
||||
default値20個。1個で約5~10秒分。
|
||||
buffer : Buffer
|
||||
buffer of chat data fetched background.
|
||||
|
||||
interruptable : bool
|
||||
Ctrl+Cによる処理中断を行うかどうか。
|
||||
Allows keyboard interrupts.
|
||||
Set this parameter to False if your own threading program causes
|
||||
the problem.
|
||||
|
||||
callback : func
|
||||
_listen()関数から一定間隔で自動的に呼びだす関数。
|
||||
function called periodically from _listen().
|
||||
|
||||
done_callback : func
|
||||
listener終了時に呼び出すコールバック。
|
||||
function called when listener ends.
|
||||
|
||||
exception_handler : func
|
||||
例外を処理する関数
|
||||
|
||||
direct_mode : bool
|
||||
Trueの場合、bufferを使わずにcallbackを呼ぶ。
|
||||
Trueの場合、callbackの設定が必須
|
||||
(設定していない場合IllegalFunctionCall例外を発生させる)
|
||||
If True, invoke specified callback function without using buffer.
|
||||
callback is required. If not, IllegalFunctionCall will be raised.
|
||||
|
||||
force_replay : bool
|
||||
Trueの場合、ライブチャットが取得できる場合であっても
|
||||
強制的にアーカイブ済みチャットを取得する。
|
||||
force to fetch archived chat data, even if specified video is live.
|
||||
|
||||
topchat_only : bool
|
||||
Trueの場合、上位チャットのみ取得する。
|
||||
If True, get only top chat.
|
||||
|
||||
replay_continuation : str
|
||||
If this parameter is not None, the processor will attempt to get chat data from continuation.
|
||||
This parameter is only allowed in archived mode.
|
||||
|
||||
Attributes
|
||||
---------
|
||||
_is_alive : bool
|
||||
チャット取得を停止するためのフラグ
|
||||
Flag to stop getting chat.
|
||||
'''
|
||||
|
||||
_setup_finished = False
|
||||
|
||||
def __init__(self, video_id,
|
||||
seektime = 0,
|
||||
processor = DefaultProcessor(),
|
||||
buffer = None,
|
||||
interruptable = True,
|
||||
callback = None,
|
||||
done_callback = None,
|
||||
exception_handler = None,
|
||||
direct_mode = False,
|
||||
force_replay = False,
|
||||
topchat_only = False,
|
||||
logger = config.logger(__name__),
|
||||
seektime=-1,
|
||||
processor=DefaultProcessor(),
|
||||
buffer=None,
|
||||
interruptable=True,
|
||||
callback=None,
|
||||
done_callback=None,
|
||||
exception_handler=None,
|
||||
direct_mode=False,
|
||||
force_replay=False,
|
||||
topchat_only=False,
|
||||
logger=config.logger(__name__),
|
||||
replay_continuation=None
|
||||
):
|
||||
self.video_id = video_id
|
||||
self._video_id = util.extract_video_id(video_id)
|
||||
self.seektime = seektime
|
||||
if isinstance(processor, tuple):
|
||||
self.processor = Combinator(processor)
|
||||
@@ -99,59 +100,64 @@ class LiveChatAsync:
|
||||
self._exception_handler = exception_handler
|
||||
self._direct_mode = direct_mode
|
||||
self._is_alive = True
|
||||
self._is_replay = force_replay
|
||||
self._parser = Parser(is_replay = self._is_replay)
|
||||
self._is_replay = force_replay or (replay_continuation is not None)
|
||||
self._parser = Parser(is_replay=self._is_replay)
|
||||
self._pauser = Queue()
|
||||
self._pauser.put_nowait(None)
|
||||
self._setup()
|
||||
self._first_fetch = True
|
||||
self._fetch_url = "live_chat/get_live_chat?continuation="
|
||||
self._first_fetch = replay_continuation is None
|
||||
self._fetch_url = config._sml if replay_continuation is None else config._smr
|
||||
self._topchat_only = topchat_only
|
||||
self._dat = ''
|
||||
self._last_offset_ms = 0
|
||||
self._logger = logger
|
||||
self.exception = None
|
||||
self.continuation = replay_continuation
|
||||
LiveChatAsync._logger = logger
|
||||
|
||||
if not LiveChatAsync._setup_finished:
|
||||
LiveChatAsync._setup_finished = True
|
||||
if exception_handler:
|
||||
self._set_exception_handler(exception_handler)
|
||||
if interruptable:
|
||||
signal.signal(signal.SIGINT,
|
||||
(lambda a, b:asyncio.create_task(
|
||||
LiveChatAsync.shutdown(None,signal.SIGINT,b))
|
||||
))
|
||||
(lambda a, b: self._keyboard_interrupt()))
|
||||
self._setup()
|
||||
|
||||
def _setup(self):
|
||||
#direct modeがTrueでcallback未設定の場合例外発生。
|
||||
# An exception is raised when direct mode is true and no callback is set.
|
||||
if self._direct_mode:
|
||||
if self._callback is None:
|
||||
raise IllegalFunctionCall(
|
||||
raise exceptions.IllegalFunctionCall(
|
||||
"When direct_mode=True, callback parameter is required.")
|
||||
else:
|
||||
#direct modeがFalseでbufferが未設定ならばデフォルトのbufferを作成
|
||||
# Create a default buffer if `direct_mode` is False and buffer is not set.
|
||||
if self._buffer is None:
|
||||
self._buffer = Buffer(maxsize = 20)
|
||||
#callbackが指定されている場合はcallbackを呼ぶループタスクを作成
|
||||
self._buffer = Buffer(maxsize=20)
|
||||
# Create a loop task to call callback if the `callback` param is specified.
|
||||
if self._callback is None:
|
||||
pass
|
||||
else:
|
||||
#callbackを呼ぶループタスクの開始
|
||||
# Create a loop task to call callback if the `callback` param is specified.
|
||||
loop = asyncio.get_event_loop()
|
||||
loop.create_task(self._callback_loop(self._callback))
|
||||
#_listenループタスクの開始
|
||||
# Start a loop task for _listen()
|
||||
loop = asyncio.get_event_loop()
|
||||
listen_task = loop.create_task(self._startlisten())
|
||||
#add_done_callbackの登録
|
||||
self.listen_task = loop.create_task(self._startlisten())
|
||||
# Register add_done_callback
|
||||
if self._done_callback is None:
|
||||
listen_task.add_done_callback(self.finish)
|
||||
self.listen_task.add_done_callback(self._finish)
|
||||
else:
|
||||
listen_task.add_done_callback(self._done_callback)
|
||||
self.listen_task.add_done_callback(self._done_callback)
|
||||
|
||||
async def _startlisten(self):
|
||||
"""Fetch first continuation parameter,
|
||||
create and start _listen loop.
|
||||
"""
|
||||
initial_continuation = liveparam.getparam(self.video_id,3)
|
||||
await self._listen(initial_continuation)
|
||||
if not self.continuation:
|
||||
self.continuation = liveparam.getparam(
|
||||
self._video_id,
|
||||
channel_id=util.get_channelid(httpx.Client(http2=True), self._video_id),
|
||||
past_sec=3)
|
||||
|
||||
await self._listen(self.continuation)
|
||||
|
||||
async def _listen(self, continuation):
|
||||
''' Fetch chat data and store them into buffer,
|
||||
@@ -163,39 +169,41 @@ class LiveChatAsync:
|
||||
parameter for next chat data
|
||||
'''
|
||||
try:
|
||||
async with aiohttp.ClientSession() as session:
|
||||
async with httpx.AsyncClient(http2=True) as client:
|
||||
while(continuation and self._is_alive):
|
||||
continuation = await self._check_pause(continuation)
|
||||
contents = await self._get_contents(
|
||||
continuation, session, headers)
|
||||
contents = await self._get_contents(continuation, client, headers)
|
||||
metadata, chatdata = self._parser.parse(contents)
|
||||
|
||||
timeout = metadata['timeoutMs']/1000
|
||||
continuation = metadata.get('continuation')
|
||||
if continuation:
|
||||
self.continuation = continuation
|
||||
timeout = metadata['timeoutMs'] / 1000
|
||||
chat_component = {
|
||||
"video_id" : self.video_id,
|
||||
"timeout" : timeout,
|
||||
"chatdata" : chatdata
|
||||
"video_id": self._video_id,
|
||||
"timeout": timeout,
|
||||
"chatdata": chatdata
|
||||
}
|
||||
time_mark =time.time()
|
||||
time_mark = time.time()
|
||||
if self._direct_mode:
|
||||
processed_chat = self.processor.process([chat_component])
|
||||
if isinstance(processed_chat,tuple):
|
||||
processed_chat = self.processor.process(
|
||||
[chat_component])
|
||||
if isinstance(processed_chat, tuple):
|
||||
await self._callback(*processed_chat)
|
||||
else:
|
||||
await self._callback(processed_chat)
|
||||
else:
|
||||
await self._buffer.put(chat_component)
|
||||
diff_time = timeout - (time.time()-time_mark)
|
||||
diff_time = timeout - (time.time() - time_mark)
|
||||
await asyncio.sleep(diff_time)
|
||||
continuation = metadata.get('continuation')
|
||||
except ChatParseException as e:
|
||||
self._logger.debug(f"[{self.video_id}]{str(e)}")
|
||||
return
|
||||
except (TypeError , json.JSONDecodeError) :
|
||||
self._logger.error(f"{traceback.format_exc(limit = -1)}")
|
||||
return
|
||||
self._last_offset_ms = metadata.get('last_offset_ms', 0)
|
||||
except exceptions.ChatParseException as e:
|
||||
self._logger.debug(f"[{self._video_id}]{str(e)}")
|
||||
raise
|
||||
except Exception:
|
||||
self._logger.error(f"{traceback.format_exc(limit=-1)}")
|
||||
raise
|
||||
|
||||
self._logger.debug(f"[{self.video_id}]finished fetching chat.")
|
||||
self._logger.debug(f"[{self._video_id}] finished fetching chat.")
|
||||
|
||||
async def _check_pause(self, continuation):
|
||||
if self._pauser.empty():
|
||||
@@ -206,11 +214,14 @@ class LiveChatAsync:
|
||||
'''
|
||||
self._pauser.put_nowait(None)
|
||||
if not self._is_replay:
|
||||
continuation = liveparam.getparam(
|
||||
self.video_id, 3, self._topchat_only)
|
||||
async with httpx.AsyncClient(http2=True) as client:
|
||||
continuation = await liveparam.getparam(self._video_id,
|
||||
channel_id=util.get_channelid_async(client, self.video_id),
|
||||
past_sec=3)
|
||||
|
||||
return continuation
|
||||
|
||||
async def _get_contents(self, continuation, session, headers):
|
||||
async def _get_contents(self, continuation, client, headers):
|
||||
'''Get 'continuationContents' from livechat json.
|
||||
If contents is None at first fetching,
|
||||
try to fetch archive chat data.
|
||||
@@ -219,60 +230,61 @@ class LiveChatAsync:
|
||||
-------
|
||||
'continuationContents' which includes metadata & chatdata.
|
||||
'''
|
||||
livechat_json = (await
|
||||
self._get_livechat_json(continuation, session, headers)
|
||||
)
|
||||
contents = self._parser.get_contents(livechat_json)
|
||||
livechat_json = await self._get_livechat_json(continuation, client, replay=self._is_replay, offset_ms=self._last_offset_ms)
|
||||
contents, dat = self._parser.get_contents(livechat_json)
|
||||
if self._dat == '' and dat:
|
||||
self._dat = dat
|
||||
if self._first_fetch:
|
||||
if contents is None or self._is_replay:
|
||||
'''Try to fetch archive chat data.'''
|
||||
self._parser.is_replay = True
|
||||
self._fetch_url = "live_chat_replay/get_live_chat_replay?continuation="
|
||||
self._fetch_url = config._smr
|
||||
channelid = await util.get_channelid_async(client, self._video_id)
|
||||
continuation = arcparam.getparam(
|
||||
self.video_id, self.seektime, self._topchat_only)
|
||||
self._video_id, self.seektime, self._topchat_only, channelid)
|
||||
livechat_json = (await self._get_livechat_json(
|
||||
continuation, session, headers))
|
||||
continuation, client, replay=True, offset_ms=self.seektime * 1000))
|
||||
reload_continuation = self._parser.reload_continuation(
|
||||
self._parser.get_contents(livechat_json))
|
||||
self._parser.get_contents(livechat_json)[0])
|
||||
if reload_continuation:
|
||||
livechat_json = (await self._get_livechat_json(
|
||||
reload_continuation, session, headers))
|
||||
contents = self._parser.get_contents(livechat_json)
|
||||
reload_continuation, client, headers))
|
||||
contents, _ = self._parser.get_contents(livechat_json)
|
||||
self._is_replay = True
|
||||
self._first_fetch = False
|
||||
return contents
|
||||
|
||||
async def _get_livechat_json(self, continuation, session, headers):
|
||||
async def _get_livechat_json(self, continuation, client, replay: bool, offset_ms: int = 0):
|
||||
'''
|
||||
Get json which includes chat data.
|
||||
'''
|
||||
continuation = urllib.parse.quote(continuation)
|
||||
livechat_json = None
|
||||
status_code = 0
|
||||
url =f"https://www.youtube.com/{self._fetch_url}{continuation}&pbj=1"
|
||||
if offset_ms < 0:
|
||||
offset_ms = 0
|
||||
param = util.get_param(continuation, dat=self._dat, replay=replay, offsetms=offset_ms)
|
||||
for _ in range(MAX_RETRY + 1):
|
||||
async with session.get(url ,headers = headers) as resp:
|
||||
try:
|
||||
text = await resp.text()
|
||||
livechat_json = json.loads(text)
|
||||
resp = await client.post(self._fetch_url, json=param)
|
||||
livechat_json = resp.json()
|
||||
break
|
||||
except (ClientConnectorError,json.JSONDecodeError) :
|
||||
await asyncio.sleep(1)
|
||||
except (json.JSONDecodeError, httpx.HTTPError):
|
||||
await asyncio.sleep(2)
|
||||
continue
|
||||
else:
|
||||
self._logger.error(f"[{self.video_id}]"
|
||||
f"Exceeded retry count. status_code={status_code}")
|
||||
return None
|
||||
self._logger.error(f"[{self._video_id}]"
|
||||
f"Exceeded retry count.")
|
||||
raise exceptions.RetryExceedMaxCount()
|
||||
return livechat_json
|
||||
|
||||
async def _callback_loop(self,callback):
|
||||
""" コンストラクタでcallbackを指定している場合、バックグラウンドで
|
||||
callbackに指定された関数に一定間隔でチャットデータを投げる。
|
||||
async def _callback_loop(self, callback):
|
||||
""" If a callback is specified in the constructor,
|
||||
it throws chat data at regular intervals to the
|
||||
function specified in the callback in the backgroun
|
||||
|
||||
Parameter
|
||||
---------
|
||||
callback : func
|
||||
加工済みのチャットデータを渡す先の関数。
|
||||
function to which the processed chat data is passed.
|
||||
"""
|
||||
while self.is_alive():
|
||||
items = await self._buffer.get()
|
||||
@@ -283,17 +295,22 @@ class LiveChatAsync:
|
||||
await self._callback(processed_chat)
|
||||
|
||||
async def get(self):
|
||||
""" bufferからデータを取り出し、processorに投げ、
|
||||
加工済みのチャットデータを返す。
|
||||
"""
|
||||
Retrieves data from the buffer,
|
||||
throws it to the processor,
|
||||
and returns the processed chat data.
|
||||
|
||||
Returns
|
||||
: Processorによって加工されたチャットデータ
|
||||
: Chat data processed by the Processor
|
||||
"""
|
||||
if self._callback is None:
|
||||
if self.is_alive():
|
||||
items = await self._buffer.get()
|
||||
return self.processor.process(items)
|
||||
raise IllegalFunctionCall(
|
||||
"既にcallbackを登録済みのため、get()は実行できません。")
|
||||
else:
|
||||
return []
|
||||
raise exceptions.IllegalFunctionCall(
|
||||
"Callback parameter is already set, so get() cannot be performed.")
|
||||
|
||||
def is_replay(self):
|
||||
return self._is_replay
|
||||
@@ -313,36 +330,40 @@ class LiveChatAsync:
|
||||
def is_alive(self):
|
||||
return self._is_alive
|
||||
|
||||
def finish(self,sender):
|
||||
'''Listener終了時のコールバック'''
|
||||
def _finish(self, sender):
|
||||
'''Called when the _listen() task finished.'''
|
||||
try:
|
||||
self.terminate()
|
||||
self._task_finished()
|
||||
except CancelledError:
|
||||
self._logger.debug(f'[{self.video_id}]cancelled:{sender}')
|
||||
self._logger.debug(f'[{self._video_id}] cancelled:{sender}')
|
||||
|
||||
def terminate(self):
|
||||
'''
|
||||
Listenerを終了する。
|
||||
'''
|
||||
if self._pauser.empty():
|
||||
self._pauser.put_nowait(None)
|
||||
self._is_alive = False
|
||||
if self._direct_mode == False:
|
||||
#bufferにダミーオブジェクトを入れてis_alive()を判定させる
|
||||
self._buffer.put_nowait({'chatdata':'','timeout':0})
|
||||
self._logger.info(f'[{self.video_id}]finished.')
|
||||
self._buffer.put_nowait({})
|
||||
self.processor.finalize()
|
||||
|
||||
def _keyboard_interrupt(self):
|
||||
self.exception = exceptions.ChatDataFinished()
|
||||
self.terminate()
|
||||
|
||||
def _task_finished(self):
|
||||
if self.is_alive():
|
||||
self.terminate()
|
||||
try:
|
||||
self.listen_task.result()
|
||||
except Exception as e:
|
||||
self.exception = e
|
||||
if not isinstance(e, exceptions.ChatParseException):
|
||||
self._logger.error(f'Internal exception - {type(e)}{str(e)}')
|
||||
self._logger.info(f'[{self._video_id}] finished.')
|
||||
|
||||
def raise_for_status(self):
|
||||
if self.exception is not None:
|
||||
raise self.exception
|
||||
|
||||
@classmethod
|
||||
def _set_exception_handler(cls, handler):
|
||||
loop = asyncio.get_event_loop()
|
||||
loop.set_exception_handler(handler)
|
||||
|
||||
@classmethod
|
||||
async def shutdown(cls, event, sig = None, handler=None):
|
||||
cls._logger.debug("shutdown...")
|
||||
tasks = [t for t in asyncio.all_tasks() if t is not
|
||||
asyncio.current_task()]
|
||||
[task.cancel() for task in tasks]
|
||||
|
||||
cls._logger.debug(f"complete remaining tasks...")
|
||||
await asyncio.gather(*tasks,return_exceptions=True)
|
||||
loop = asyncio.get_event_loop()
|
||||
loop.stop()
|
||||
@@ -1,20 +1,22 @@

import queue


class Buffer(queue.Queue):
    '''
    チャットデータを格納するバッファの役割を持つFIFOキュー
    Buffer for storing chat data.

    Parameter
    ---------
    max_size : int
        格納するチャットブロックの最大個数。0の場合は無限。
        最大値を超える場合は古いチャットブロックから破棄される。
    maxsize : int
        Maximum number of chat blocks to be stored.
        If it exceeds the maximum, the oldest chat block will be discarded.
    '''
    def __init__(self,maxsize = 0):

    def __init__(self, maxsize=0):
        super().__init__(maxsize=maxsize)

    def put(self,item):
    def put(self, item):
        if item is None:
            return
        if super().full():
@@ -22,7 +24,7 @@ class Buffer(queue.Queue):
        else:
            super().put(item)

    def put_nowait(self,item):
    def put_nowait(self, item):
        if item is None:
            return
        if super().full():
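A quick illustration of the drop-oldest behaviour stated in the docstring above; the module path is an assumption, and the full() branch (truncated in this hunk) is assumed to discard the oldest block before inserting the new one.

from pytchat.core_multithread.buffer import Buffer  # module path is an assumption

buf = Buffer(maxsize=2)
buf.put_nowait({"chatdata": ["a"], "timeout": 1})
buf.put_nowait({"chatdata": ["b"], "timeout": 1})
# The buffer is now full; assuming the full() branch drops the oldest block,
# only the "b" and "c" components remain after this call.
buf.put_nowait({"chatdata": ["c"], "timeout": 1})
print(buf.get_nowait()["chatdata"])  # expected -> ['b']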
@@ -1,17 +1,16 @@
|
||||
import requests
|
||||
import datetime
|
||||
import httpx
|
||||
import json
|
||||
import random
|
||||
import signal
|
||||
import time
|
||||
import traceback
|
||||
import urllib.parse
|
||||
from concurrent.futures import CancelledError, ThreadPoolExecutor
|
||||
from queue import Queue
|
||||
from threading import Event
|
||||
from .buffer import Buffer
|
||||
from ..parser.live import Parser
|
||||
from .. import config
|
||||
from ..exceptions import ChatParseException,IllegalFunctionCall
|
||||
from .. import exceptions
|
||||
from .. import util
|
||||
from ..paramgen import liveparam, arcparam
|
||||
from ..processors.default.processor import DefaultProcessor
|
||||
from ..processors.combinator import Combinator
|
||||
@@ -21,73 +20,75 @@ MAX_RETRY = 10
|
||||
|
||||
|
||||
class LiveChat:
|
||||
''' スレッドプールを利用してYouTubeのライブ配信のチャットデータを取得する
|
||||
'''
|
||||
LiveChat object fetches chat data and stores them
|
||||
in a buffer with ThreadpoolExecutor.
|
||||
|
||||
Parameter
|
||||
---------
|
||||
video_id : str
|
||||
動画ID
|
||||
|
||||
seektime : int
|
||||
(ライブチャット取得時は無視)
|
||||
取得開始するアーカイブ済みチャットの経過時間(秒)
|
||||
マイナス値を指定した場合は、配信開始前のチャットも取得する。
|
||||
start position of fetching chat (seconds).
|
||||
This option is valid for archived chat only.
|
||||
If negative value, chat data posted before the start of the broadcast
|
||||
will be retrieved as well.
|
||||
|
||||
processor : ChatProcessor
|
||||
チャットデータを加工するオブジェクト
|
||||
|
||||
buffer : Buffer(maxsize:20[default])
|
||||
チャットデータchat_componentを格納するバッファ。
|
||||
maxsize : 格納できるchat_componentの個数
|
||||
default値20個。1個で約5~10秒分。
|
||||
buffer : Buffer
|
||||
buffer of chat data fetched background.
|
||||
|
||||
interruptable : bool
|
||||
Ctrl+Cによる処理中断を行うかどうか。
|
||||
Allows keyboard interrupts.
|
||||
Set this parameter to False if your own threading program causes
|
||||
the problem.
|
||||
|
||||
callback : func
|
||||
_listen()関数から一定間隔で自動的に呼びだす関数。
|
||||
function called periodically from _listen().
|
||||
|
||||
done_callback : func
|
||||
listener終了時に呼び出すコールバック。
|
||||
function called when listener ends.
|
||||
|
||||
direct_mode : bool
|
||||
Trueの場合、bufferを使わずにcallbackを呼ぶ。
|
||||
Trueの場合、callbackの設定が必須
|
||||
(設定していない場合IllegalFunctionCall例外を発生させる)
|
||||
If True, invoke specified callback function without using buffer.
|
||||
callback is required. If not, IllegalFunctionCall will be raised.
|
||||
|
||||
force_replay : bool
|
||||
Trueの場合、ライブチャットが取得できる場合であっても
|
||||
強制的にアーカイブ済みチャットを取得する。
|
||||
force to fetch archived chat data, even if specified video is live.
|
||||
|
||||
topchat_only : bool
|
||||
Trueの場合、上位チャットのみ取得する。
|
||||
If True, get only top chat.
|
||||
|
||||
replay_continuation : str
|
||||
If this parameter is not None, the processor will attempt to get chat data from continuation.
|
||||
This parameter is only allowed in archived mode.
|
||||
|
||||
Attributes
|
||||
---------
|
||||
_executor : ThreadPoolExecutor
|
||||
チャットデータ取得ループ(_listen)用のスレッド
|
||||
This is used for _listen() loop.
|
||||
|
||||
_is_alive : bool
|
||||
チャット取得を停止するためのフラグ
|
||||
Flag to stop getting chat.
|
||||
'''
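A minimal sketch of the buffered (non-callback) mode of the threaded class; the import path and the video id are assumptions, and get() returns whatever the configured processor produces.

import time
from pytchat import LiveChat  # import path is an assumption

chat = LiveChat("VIDEO_ID")  # hypothetical video id
while chat.is_alive():
    data = chat.get()  # processed output of DefaultProcessor by default
    print(data)
    time.sleep(3)
chat.raise_for_status()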
|
||||
|
||||
_setup_finished = False
|
||||
#チャット監視中のListenerのリスト
|
||||
_listeners = []
|
||||
|
||||
def __init__(self, video_id,
|
||||
seektime = 0,
|
||||
processor = DefaultProcessor(),
|
||||
buffer = None,
|
||||
interruptable = True,
|
||||
callback = None,
|
||||
done_callback = None,
|
||||
direct_mode = False,
|
||||
force_replay = False,
|
||||
topchat_only = False,
|
||||
logger = config.logger(__name__)
|
||||
seektime=-1,
|
||||
processor=DefaultProcessor(),
|
||||
buffer=None,
|
||||
interruptable=True,
|
||||
callback=None,
|
||||
done_callback=None,
|
||||
direct_mode=False,
|
||||
force_replay=False,
|
||||
topchat_only=False,
|
||||
logger=config.logger(__name__),
|
||||
replay_continuation=None
|
||||
):
|
||||
self.video_id = video_id
|
||||
self._video_id = util.extract_video_id(video_id)
|
||||
self.seektime = seektime
|
||||
if isinstance(processor, tuple):
|
||||
self.processor = Combinator(processor)
|
||||
@@ -99,55 +100,59 @@ class LiveChat:
|
||||
self._executor = ThreadPoolExecutor(max_workers=2)
|
||||
self._direct_mode = direct_mode
|
||||
self._is_alive = True
|
||||
self._is_replay = force_replay
|
||||
self._parser = Parser(is_replay = self._is_replay)
|
||||
self._is_replay = force_replay or (replay_continuation is not None)
|
||||
self._parser = Parser(is_replay=self._is_replay)
|
||||
self._pauser = Queue()
|
||||
self._pauser.put_nowait(None)
|
||||
self._setup()
|
||||
self._first_fetch = True
|
||||
self._fetch_url = "live_chat/get_live_chat?continuation="
|
||||
self._first_fetch = replay_continuation is None
|
||||
self._fetch_url = config._sml if replay_continuation is None else config._smr
|
||||
self._topchat_only = topchat_only
|
||||
self._dat = ''
|
||||
self._last_offset_ms = 0
|
||||
self._logger = logger
|
||||
LiveChat._logger = logger
|
||||
if not LiveChat._setup_finished:
|
||||
LiveChat._setup_finished = True
|
||||
self._event = Event()
|
||||
self.continuation = replay_continuation
|
||||
|
||||
self.exception = None
|
||||
if interruptable:
|
||||
signal.signal(signal.SIGINT, (lambda a, b:
|
||||
(LiveChat.shutdown(None,signal.SIGINT,b))
|
||||
))
|
||||
LiveChat._listeners.append(self)
|
||||
signal.signal(signal.SIGINT, lambda a, b: self.terminate())
|
||||
self._setup()
|
||||
|
||||
def _setup(self):
|
||||
#direct modeがTrueでcallback未設定の場合例外発生。
|
||||
# An exception is raised when direct mode is true and no callback is set.
|
||||
if self._direct_mode:
|
||||
if self._callback is None:
|
||||
raise IllegalFunctionCall(
|
||||
raise exceptions.IllegalFunctionCall(
|
||||
"When direct_mode=True, callback parameter is required.")
|
||||
else:
|
||||
#direct modeがFalseでbufferが未設定ならばデフォルトのbufferを作成
|
||||
# Create a default buffer if `direct_mode` is False and buffer is not set.
|
||||
if self._buffer is None:
|
||||
self._buffer = Buffer(maxsize = 20)
|
||||
#callbackが指定されている場合はcallbackを呼ぶループタスクを作成
|
||||
self._buffer = Buffer(maxsize=20)
|
||||
# Create a loop task to call callback if the `callback` param is specified.
|
||||
if self._callback is None:
|
||||
pass
|
||||
else:
|
||||
#callbackを呼ぶループタスクの開始
|
||||
self._executor.submit(self._callback_loop,self._callback)
|
||||
#_listenループタスクの開始
|
||||
listen_task = self._executor.submit(self._startlisten)
|
||||
#add_done_callbackの登録
|
||||
# Start a loop task calling callback function.
|
||||
self._executor.submit(self._callback_loop, self._callback)
|
||||
# Start a loop task for _listen()
|
||||
self.listen_task = self._executor.submit(self._startlisten)
|
||||
# Register add_done_callback
|
||||
if self._done_callback is None:
|
||||
listen_task.add_done_callback(self.finish)
|
||||
self.listen_task.add_done_callback(self._finish)
|
||||
else:
|
||||
listen_task.add_done_callback(self._done_callback)
|
||||
self.listen_task.add_done_callback(self._done_callback)
|
||||
|
||||
def _startlisten(self):
|
||||
time.sleep(0.1) #sleep shortly to prohibit skipping fetching data
|
||||
time.sleep(0.1) # sleep shortly to prohibit skipping fetching data
|
||||
"""Fetch first continuation parameter,
|
||||
create and start _listen loop.
|
||||
"""
|
||||
initial_continuation = liveparam.getparam(self.video_id,3)
|
||||
self._listen(initial_continuation)
|
||||
if not self.continuation:
|
||||
self.continuation = liveparam.getparam(
|
||||
self._video_id,
|
||||
channel_id=util.get_channelid(httpx.Client(http2=True), self._video_id),
|
||||
past_sec=3)
|
||||
self._listen(self.continuation)
|
||||
|
||||
def _listen(self, continuation):
|
||||
''' Fetch chat data and store them into buffer,
|
||||
@@ -159,39 +164,41 @@ class LiveChat:
|
||||
parameter for next chat data
|
||||
'''
|
||||
try:
|
||||
with requests.Session() as session:
|
||||
with httpx.Client(http2=True) as client:
|
||||
while(continuation and self._is_alive):
|
||||
continuation = self._check_pause(continuation)
|
||||
contents = self._get_contents(
|
||||
continuation, session, headers)
|
||||
contents = self._get_contents(continuation, client, headers)
|
||||
metadata, chatdata = self._parser.parse(contents)
|
||||
|
||||
timeout = metadata['timeoutMs']/1000
|
||||
continuation = metadata.get('continuation')
|
||||
if continuation:
|
||||
self.continuation = continuation
|
||||
timeout = metadata['timeoutMs'] / 1000
|
||||
chat_component = {
|
||||
"video_id" : self.video_id,
|
||||
"timeout" : timeout,
|
||||
"chatdata" : chatdata
|
||||
"video_id": self._video_id,
|
||||
"timeout": timeout,
|
||||
"chatdata": chatdata
|
||||
}
|
||||
time_mark =time.time()
|
||||
time_mark = time.time()
|
||||
if self._direct_mode:
|
||||
processed_chat = self.processor.process([chat_component])
|
||||
if isinstance(processed_chat,tuple):
|
||||
processed_chat = self.processor.process(
|
||||
[chat_component])
|
||||
if isinstance(processed_chat, tuple):
|
||||
self._callback(*processed_chat)
|
||||
else:
|
||||
self._callback(processed_chat)
|
||||
else:
|
||||
self._buffer.put(chat_component)
|
||||
diff_time = timeout - (time.time()-time_mark)
|
||||
time.sleep(diff_time if diff_time > 0 else 0)
|
||||
continuation = metadata.get('continuation')
|
||||
except ChatParseException as e:
|
||||
self._logger.debug(f"[{self.video_id}]{str(e)}")
|
||||
return
|
||||
except (TypeError , json.JSONDecodeError) :
|
||||
self._logger.error(f"{traceback.format_exc(limit = -1)}")
|
||||
return
|
||||
diff_time = timeout - (time.time() - time_mark)
|
||||
self._event.wait(diff_time if diff_time > 0 else 0)
|
||||
self._last_offset_ms = metadata.get('last_offset_ms', 0)
|
||||
except exceptions.ChatParseException as e:
|
||||
self._logger.debug(f"[{self._video_id}]{str(e)}")
|
||||
raise
|
||||
except Exception:
|
||||
self._logger.error(f"{traceback.format_exc(limit=-1)}")
|
||||
raise
|
||||
|
||||
self._logger.debug(f"[{self.video_id}]finished fetching chat.")
|
||||
self._logger.debug(f"[{self._video_id}] finished fetching chat.")
|
||||
|
||||
def _check_pause(self, continuation):
|
||||
if self._pauser.empty():
|
||||
@@ -202,10 +209,13 @@ class LiveChat:
|
||||
'''
|
||||
self._pauser.put_nowait(None)
|
||||
if not self._is_replay:
|
||||
continuation = liveparam.getparam(self.video_id,3)
|
||||
continuation = liveparam.getparam(
|
||||
self._video_id, channel_id=util.get_channelid(httpx.Client(http2=True), self._video_id),
|
||||
past_sec=3, topchat_only=self._topchat_only)
|
||||
|
||||
return continuation
|
||||
|
||||
def _get_contents(self, continuation, session, headers):
|
||||
def _get_contents(self, continuation, client, headers):
|
||||
'''Get 'continuationContents' from livechat json.
|
||||
If contents is None at first fetching,
|
||||
try to fetch archive chat data.
|
||||
@@ -214,60 +224,60 @@ class LiveChat:
|
||||
-------
|
||||
'continuationContents' which includes metadata & chat data.
|
||||
'''
|
||||
livechat_json = (
|
||||
self._get_livechat_json(continuation, session, headers)
|
||||
)
|
||||
contents = self._parser.get_contents(livechat_json)
|
||||
livechat_json = self._get_livechat_json(continuation, client, replay=self._is_replay, offset_ms=self._last_offset_ms)
|
||||
contents, dat = self._parser.get_contents(livechat_json)
|
||||
if self._dat == '' and dat:
|
||||
self._dat = dat
|
||||
if self._first_fetch:
|
||||
if contents is None or self._is_replay:
|
||||
'''Try to fetch archive chat data.'''
|
||||
self._parser.is_replay = True
|
||||
self._fetch_url = "live_chat_replay/get_live_chat_replay?continuation="
|
||||
self._fetch_url = config._smr
|
||||
continuation = arcparam.getparam(
|
||||
self.video_id, self.seektime, self._topchat_only)
|
||||
livechat_json = ( self._get_livechat_json(
|
||||
continuation, session, headers))
|
||||
self._video_id, self.seektime, self._topchat_only, util.get_channelid(client, self._video_id))
|
||||
livechat_json = (self._get_livechat_json(
|
||||
continuation, client, replay=True, offset_ms=self.seektime * 1000))
|
||||
reload_continuation = self._parser.reload_continuation(
|
||||
self._parser.get_contents(livechat_json))
|
||||
self._parser.get_contents(livechat_json)[0])
|
||||
if reload_continuation:
|
||||
livechat_json = (self._get_livechat_json(
|
||||
reload_continuation, session, headers))
|
||||
contents = self._parser.get_contents(livechat_json)
|
||||
reload_continuation, client, headers))
|
||||
contents, _ = self._parser.get_contents(livechat_json)
|
||||
self._is_replay = True
|
||||
self._first_fetch = False
|
||||
return contents
|
||||
|
||||
def _get_livechat_json(self, continuation, session, headers):
|
||||
def _get_livechat_json(self, continuation, client, replay: bool, offset_ms: int = 0):
|
||||
'''
|
||||
Get json which includes chat data.
|
||||
'''
|
||||
continuation = urllib.parse.quote(continuation)
|
||||
livechat_json = None
|
||||
status_code = 0
|
||||
url =f"https://www.youtube.com/{self._fetch_url}{continuation}&pbj=1"
|
||||
if offset_ms < 0:
|
||||
offset_ms = 0
|
||||
param = util.get_param(continuation, dat=self._dat, replay=replay, offsetms=offset_ms)
|
||||
for _ in range(MAX_RETRY + 1):
|
||||
with session.get(url ,headers = headers) as resp:
|
||||
try:
|
||||
text = resp.text
|
||||
livechat_json = json.loads(text)
|
||||
response = client.post(self._fetch_url, json=param)
|
||||
livechat_json = response.json()
|
||||
break
|
||||
except json.JSONDecodeError :
|
||||
time.sleep(1)
|
||||
except (json.JSONDecodeError, httpx.HTTPError):
|
||||
time.sleep(2)
|
||||
continue
|
||||
else:
|
||||
self._logger.error(f"[{self.video_id}]"
|
||||
f"Exceeded retry count. status_code={status_code}")
|
||||
return None
|
||||
self._logger.error(f"[{self._video_id}]"
|
||||
f"Exceeded retry count.")
|
||||
raise exceptions.RetryExceedMaxCount()
|
||||
return livechat_json
|
||||
|
||||
def _callback_loop(self,callback):
|
||||
""" コンストラクタでcallbackを指定している場合、バックグラウンドで
|
||||
callbackに指定された関数に一定間隔でチャットデータを投げる。
|
||||
def _callback_loop(self, callback):
|
||||
""" If a callback is specified in the constructor,
|
||||
it throws chat data at regular intervals to the
|
||||
function specified in the callback in the backgroun
|
||||
|
||||
Parameter
|
||||
---------
|
||||
callback : func
|
||||
加工済みのチャットデータを渡す先の関数。
|
||||
function to which the processed chat data is passed.
|
||||
"""
|
||||
while self.is_alive():
|
||||
items = self._buffer.get()
|
||||
@@ -278,17 +288,22 @@ class LiveChat:
|
||||
self._callback(processed_chat)
|
||||
|
||||
def get(self):
|
||||
""" bufferからデータを取り出し、processorに投げ、
|
||||
加工済みのチャットデータを返す。
|
||||
"""
|
||||
Retrieves data from the buffer,
|
||||
throws it to the processor,
|
||||
and returns the processed chat data.
|
||||
|
||||
Returns
|
||||
: Processorによって加工されたチャットデータ
|
||||
: Chat data processed by the Processor
|
||||
"""
|
||||
if self._callback is None:
|
||||
if self.is_alive():
|
||||
items = self._buffer.get()
|
||||
return self.processor.process(items)
|
||||
raise IllegalFunctionCall(
|
||||
"既にcallbackを登録済みのため、get()は実行できません。")
|
||||
else:
|
||||
return []
|
||||
raise exceptions.IllegalFunctionCall(
|
||||
"Callback parameter is already set, so get() cannot be performed.")
|
||||
|
||||
def is_replay(self):
|
||||
return self._is_replay
|
||||
@@ -308,25 +323,32 @@ class LiveChat:
|
||||
def is_alive(self):
|
||||
return self._is_alive
|
||||
|
||||
def finish(self,sender):
|
||||
'''Listener終了時のコールバック'''
|
||||
def _finish(self, sender):
|
||||
'''Called when the _listen() task finished.'''
|
||||
try:
|
||||
self.terminate()
|
||||
self._task_finished()
|
||||
except CancelledError:
|
||||
self._logger.debug(f'[{self.video_id}]cancelled:{sender}')
|
||||
self._logger.debug(f'[{self._video_id}] cancelled:{sender}')
|
||||
|
||||
def terminate(self):
|
||||
'''
|
||||
Listenerを終了する。
|
||||
'''
|
||||
if self._pauser.empty():
|
||||
self._pauser.put_nowait(None)
|
||||
self._is_alive = False
|
||||
if self._direct_mode == False:
|
||||
#bufferにダミーオブジェクトを入れてis_alive()を判定させる
|
||||
self._buffer.put({'chatdata':'','timeout':0})
|
||||
self._logger.info(f'[{self.video_id}]finished.')
|
||||
self._buffer.put({})
|
||||
self._event.set()
|
||||
self.processor.finalize()
|
||||
|
||||
@classmethod
|
||||
def shutdown(cls, event, sig = None, handler=None):
|
||||
cls._logger.debug("shutdown...")
|
||||
for t in LiveChat._listeners:
|
||||
t._is_alive = False
|
||||
def _task_finished(self):
|
||||
if self.is_alive():
|
||||
self.terminate()
|
||||
try:
|
||||
self.listen_task.result()
|
||||
except Exception as e:
|
||||
self.exception = e
|
||||
if not isinstance(e, exceptions.ChatParseException):
|
||||
self._logger.error(f'Internal exception - {type(e)}{str(e)}')
|
||||
self._logger.info(f'[{self._video_id}] finished.')
|
||||
|
||||
def raise_for_status(self):
|
||||
if self.exception is not None:
|
||||
raise self.exception
|
||||
|
||||
@@ -1,46 +1,81 @@
class ChatParseException(Exception):
    '''
    チャットデータをパースするライブラリが投げる例外の基底クラス
    Base exception thrown by the parser
    '''
    pass

class NoYtinitialdataException(ChatParseException):
    '''
    Thrown when the chat data url cannot be found in the broadcast page.
    '''
    pass

class ResponseContextError(ChatParseException):
    '''
    Thrown when chat data is disabled on the broadcast page.
    '''
    pass

class NoLivechatRendererException(ChatParseException):
    '''
    チャットデータのJSON中にlivechatRendererがない時に投げる例外
    Thrown when chat data is invalid.
    '''
    pass


class NoContentsException(ChatParseException):
class NoContents(ChatParseException):
    '''
    チャットデータのJSON中にContinuationContentsがない時に投げる例外
    Thrown when ContinuationContents is missing in JSON.
    '''
    pass

class NoContinuationsException(ChatParseException):

class NoContinuation(ChatParseException):
    '''
    チャットデータのContinuationContents中にcontinuationがない時に投げる例外
    Thrown when continuation is missing in ContinuationContents.
    '''
    pass


class IllegalFunctionCall(Exception):
    '''
    set_callback()を実行済みにもかかわらず
    get()を呼び出した場合の例外
    Thrown when get() is called even though
    set_callback() has been executed.
    '''
    pass


class InvalidVideoIdException(Exception):
    '''
    Thrown when the video_id does not exist (VideoInfo).
    '''
    def __init__(self, doc):
        self.msg = "InvalidVideoIdException"
        self.doc = doc


class UnknownConnectionError(Exception):
    pass


class RetryExceedMaxCount(Exception):
    '''
    Thrown when the number of retries exceeds the maximum value.
    '''
    pass


class ChatDataFinished(ChatParseException):
    pass


class ReceivedUnknownContinuation(ChatParseException):
    pass


class FailedExtractContinuation(ChatDataFinished):
    pass


class VideoInfoParseError(Exception):
    '''
    Base exception when parsing video info.
    '''


class PatternUnmatchError(VideoInfoParseError):
    '''
    Thrown when video info cannot be parsed because no pattern matches.
    '''
    def __init__(self, doc=''):
        self.msg = "PatternUnmatchError"
        self.doc = doc
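A short sketch of how a caller can tell a normal end of chat from a real failure using the hierarchy above, together with the raise_for_status() method added to the LiveChat classes; the import paths and the video id are assumptions.

from pytchat import LiveChat, exceptions  # import paths are assumptions

chat = LiveChat("VIDEO_ID")  # hypothetical video id
while chat.is_alive():
    print(chat.get())

try:
    chat.raise_for_status()
except exceptions.ChatDataFinished:
    print("Chat finished normally (stream ended or archive exhausted).")
except exceptions.ChatParseException as err:
    print(f"Chat could not be parsed: {err}")
except exceptions.RetryExceedMaxCount:
    print("Gave up after MAX_RETRY failed requests.")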
@@ -1,107 +1,37 @@
|
||||
from . import enc
|
||||
from base64 import urlsafe_b64encode as b64enc
|
||||
from functools import reduce
|
||||
import math
|
||||
import random
|
||||
import urllib.parse
|
||||
from urllib.parse import quote
|
||||
|
||||
'''
|
||||
Generate continuation parameter of youtube replay chat.
|
||||
|
||||
Author: taizan-hokuto (2019) @taizan205
|
||||
def _header(video_id, channel_id) -> str:
|
||||
S1_3 = enc.rs(1, video_id)
|
||||
S1_5 = enc.rs(1, channel_id) + enc.rs(2, video_id)
|
||||
S1 = enc.rs(3, S1_3) + enc.rs(5, S1_5)
|
||||
S3 = enc.rs(48687757, enc.rs(1, video_id))
|
||||
header_replay = enc.rs(1, S1) + enc.rs(3, S3) + enc.nm(4, 1)
|
||||
return b64enc(header_replay)
|
||||
|
||||
ver 0.0.1 2019.10.05
|
||||
'''
|
||||
|
||||
def _gen_vid(video_id):
|
||||
"""generate video_id parameter.
|
||||
Parameter
|
||||
---------
|
||||
video_id : str
|
||||
|
||||
Return
|
||||
---------
|
||||
bytes : base64 encoded video_id parameter.
|
||||
"""
|
||||
header_magic = b'\x0A\x0F\x1A\x0D\x0A'
|
||||
header_id = video_id.encode()
|
||||
header_sep_1 = b'\x1A\x13\xEA\xA8\xDD\xB9\x01\x0D\x0A\x0B'
|
||||
header_terminator = b'\x20\x01'
|
||||
|
||||
item = [
|
||||
header_magic,
|
||||
_nval(len(header_id)),
|
||||
header_id,
|
||||
header_sep_1,
|
||||
header_id,
|
||||
header_terminator
|
||||
]
|
||||
|
||||
return urllib.parse.quote(
|
||||
b64enc(reduce(lambda x, y: x+y, item)).decode()
|
||||
).encode()
|
||||
|
||||
def _nval(val):
|
||||
"""convert value to byte array"""
|
||||
if val<0: raise ValueError
|
||||
buf = b''
|
||||
while val >> 7:
|
||||
m = val & 0xFF | 0x80
|
||||
buf += m.to_bytes(1,'big')
|
||||
val >>= 7
|
||||
buf += val.to_bytes(1,'big')
|
||||
return buf
|
||||
|
||||
def _build(video_id, seektime, topchat_only):
|
||||
switch_01 = b'\x04' if topchat_only else b'\x01'
|
||||
def _build(video_id, seektime, topchat_only, channel_id) -> str:
|
||||
chattype = 4 if topchat_only else 1
|
||||
if seektime < 0:
|
||||
times =_nval(0)
|
||||
switch = b'\x04'
|
||||
elif seektime == 0:
|
||||
times =_nval(1)
|
||||
switch = b'\x03'
|
||||
else:
|
||||
times =_nval(int(seektime*1000000))
|
||||
switch = b'\x03'
|
||||
parity = b'\x00'
|
||||
seektime = 0
|
||||
timestamp = int(seektime * 1000000)
|
||||
header = enc.rs(3, _header(video_id, channel_id))
|
||||
timestamp = enc.nm(5, timestamp)
|
||||
s6 = enc.nm(6, 0)
|
||||
s7 = enc.nm(7, 0)
|
||||
s8 = enc.nm(8, 0)
|
||||
s9 = enc.nm(9, 4)
|
||||
s10 = enc.rs(10, enc.nm(4, 0))
|
||||
chattype = enc.rs(14, enc.nm(1, 4))
|
||||
s15 = enc.nm(15, 0)
|
||||
entity = b''.join((header, timestamp, s6, s7, s8, s9, s10, chattype, s15))
|
||||
continuation = enc.rs(156074452, entity)
|
||||
return quote(b64enc(continuation).decode())
|
||||
|
||||
header_magic= b'\xA2\x9D\xB0\xD3\x04'
|
||||
sep_0 = b'\x1A'
|
||||
vid = _gen_vid(video_id)
|
||||
time_tag = b'\x28'
|
||||
timestamp1 = times
|
||||
sep_1 = b'\x30\x00\x38\x00\x40\x00\x48'
|
||||
sep_2 = b'\x52\x1C\x08\x00\x10\x00\x18\x00\x20\x00'
|
||||
chkstr = b'\x2A\x0E\x73\x74\x61\x74\x69\x63\x63\x68\x65\x63\x6B\x73\x75\x6D\x40'
|
||||
sep_3 = b'\x00\x58\x03\x60'
|
||||
sep_4 = b'\x68' + parity + b'\x72\x04\x08'
|
||||
sep_5 = b'\x10' + parity + b'\x78\x00'
|
||||
body = [
|
||||
sep_0,
|
||||
_nval(len(vid)),
|
||||
vid,
|
||||
time_tag,
|
||||
timestamp1,
|
||||
sep_1,
|
||||
switch,
|
||||
sep_2,
|
||||
chkstr,
|
||||
sep_3,
|
||||
switch_01,
|
||||
sep_4,
|
||||
switch_01,
|
||||
sep_5
|
||||
]
|
||||
|
||||
body = reduce(lambda x, y: x+y, body)
|
||||
|
||||
return urllib.parse.quote(
|
||||
b64enc( header_magic +
|
||||
_nval(len(body)) +
|
||||
body
|
||||
).decode()
|
||||
)
|
||||
|
||||
def getparam(video_id, seektime = 0, topchat_only = False):
|
||||
def getparam(video_id, seektime=0, topchat_only=False, channel_id='') -> str:
|
||||
'''
|
||||
Parameter
|
||||
---------
|
||||
@@ -111,4 +41,4 @@ def getparam(video_id, seektime = 0, topchat_only = False):
|
||||
topchat_only : bool
|
||||
if True, fetch only 'top chat'
|
||||
'''
|
||||
return _build(video_id, seektime, topchat_only)
|
||||
return _build(video_id, seektime, topchat_only, channel_id)
|
||||
|
||||
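A sketch of calling the updated arcparam.getparam() with the new channel_id argument; the module path, video id and channel id are assumptions.

from pytchat.paramgen import arcparam  # module path is an assumption

# Continuation for archived (replay) chat, starting 120 seconds into the video.
continuation = arcparam.getparam(
    "VIDEO_ID", seektime=120, topchat_only=False, channel_id="CHANNEL_ID")
print(continuation)  # URL-quoted, base64-encoded protobuf-style message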
@@ -1,133 +0,0 @@
|
||||
from base64 import urlsafe_b64encode as b64enc
|
||||
from functools import reduce
|
||||
import math
|
||||
import random
|
||||
import urllib.parse
|
||||
|
||||
'''
|
||||
Generate continuation parameter of youtube replay chat.
|
||||
|
||||
Author: taizan-hokuto (2019) @taizan205
|
||||
|
||||
ver 0.0.1 2019.10.05
|
||||
'''
|
||||
|
||||
def _gen_vid_long(video_id):
|
||||
"""generate video_id parameter.
|
||||
Parameter
|
||||
---------
|
||||
video_id : str
|
||||
|
||||
Return
|
||||
---------
|
||||
byte[] : base64 encoded video_id parameter.
|
||||
"""
|
||||
header_magic = b'\x0A\x0F\x1A\x0D\x0A'
|
||||
header_id = video_id.encode()
|
||||
header_sep_1 = b'\x1A\x13\xEA\xA8\xDD\xB9\x01\x0D\x0A\x0B'
|
||||
header_terminator = b'\x20\x01'
|
||||
|
||||
item = [
|
||||
header_magic,
|
||||
_nval(len(header_id)),
|
||||
header_id,
|
||||
header_sep_1,
|
||||
header_id,
|
||||
header_terminator
|
||||
]
|
||||
|
||||
return urllib.parse.quote(
|
||||
b64enc(reduce(lambda x, y: x+y, item)).decode()
|
||||
).encode()
|
||||
|
||||
def _gen_vid(video_id):
|
||||
"""generate video_id parameter.
|
||||
Parameter
|
||||
---------
|
||||
video_id : str
|
||||
|
||||
Return
|
||||
---------
|
||||
bytes : base64 encoded video_id parameter.
|
||||
"""
|
||||
header_magic = b'\x0A\x0F\x1A\x0D\x0A'
|
||||
header_id = video_id.encode()
|
||||
header_terminator = b'\x20\x01'
|
||||
|
||||
item = [
|
||||
header_magic,
|
||||
_nval(len(header_id)),
|
||||
header_id,
|
||||
header_terminator
|
||||
]
|
||||
|
||||
return urllib.parse.quote(
|
||||
b64enc(reduce(lambda x, y: x+y, item)).decode()
|
||||
).encode()
|
||||
|
||||
def _nval(val):
|
||||
"""convert value to byte array"""
|
||||
if val<0: raise ValueError
|
||||
buf = b''
|
||||
while val >> 7:
|
||||
m = val & 0xFF | 0x80
|
||||
buf += m.to_bytes(1,'big')
|
||||
val >>= 7
|
||||
buf += val.to_bytes(1,'big')
|
||||
return buf
|
||||
|
||||
def _build(video_id, seektime, topchat_only):
|
||||
switch_01 = b'\x04' if topchat_only else b'\x01'
|
||||
if seektime < 0:
|
||||
raise ValueError("seektime must be greater than or equal to zero.")
|
||||
if seektime == 0:
|
||||
times = b''
|
||||
else:
|
||||
times =_nval(int(seektime*1000))
|
||||
if seektime > 0:
|
||||
_len_time = ( b'\x5A'
|
||||
+ (len(times)+1).to_bytes(1,'big')
|
||||
+ b'\x10')
|
||||
else:
|
||||
_len_time = b''
|
||||
|
||||
header_magic = b'\xA2\x9D\xB0\xD3\x04'
|
||||
sep_0 = b'\x1A'
|
||||
vid = _gen_vid(video_id)
|
||||
_tag = b'\x40\x01'
|
||||
timestamp1 = times
|
||||
sep_1 = b'\x60\x04\x72\x02\x08'
|
||||
terminator = b'\x78\x01'
|
||||
|
||||
body = [
|
||||
sep_0,
|
||||
_nval(len(vid)),
|
||||
vid,
|
||||
_tag,
|
||||
_len_time,
|
||||
timestamp1,
|
||||
sep_1,
|
||||
switch_01,
|
||||
terminator
|
||||
]
|
||||
|
||||
body = reduce(lambda x, y: x+y, body)
|
||||
|
||||
return urllib.parse.quote(
|
||||
b64enc( header_magic +
|
||||
_nval(len(body)) +
|
||||
body
|
||||
).decode()
|
||||
)
|
||||
|
||||
def getparam(video_id, seektime = 0.0, topchat_only = False):
|
||||
'''
|
||||
Parameter
|
||||
---------
|
||||
seektime : int
|
||||
unit:seconds
|
||||
start position of fetching chat data.
|
||||
topchat_only : bool
|
||||
if True, fetch only 'top chat'
|
||||
'''
|
||||
return _build(video_id, seektime, topchat_only)
|
||||
pytchat/paramgen/enc.py (new file, 24 lines)
@@ -0,0 +1,24 @@
def vn(val):
    if val < 0:
        raise ValueError
    buf = b''
    while val >> 7:
        m = val & 0xFF | 0x80
        buf += m.to_bytes(1, 'big')
        val >>= 7
    buf += val.to_bytes(1, 'big')
    return buf


def tp(a, b, ary):
    return vn((b << 3) | a) + ary


def rs(a, ary):
    if isinstance(ary, str):
        ary = ary.encode()
    return tp(2, a, vn(len(ary)) + ary)


def nm(a, ary):
    return tp(0, a, vn(ary))
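The helpers above form a small protobuf-style wire encoder: vn() emits a varint, tp() prefixes a field tag (field number and wire type), rs() wraps a length-delimited field and nm() a varint field. A sketch of how they compose; the module path is an assumption.

from pytchat.paramgen import enc  # module path is an assumption

print(enc.vn(300))       # varint encoding of 300 -> b'\xac\x02'
print(enc.nm(4, 1))      # field 4, wire type 0 (varint), value 1 -> b'\x20\x01'
print(enc.rs(1, "abc"))  # field 1, wire type 2 (length-delimited) -> b'\n\x03abc'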
@@ -1,157 +1,64 @@
|
||||
from base64 import urlsafe_b64encode as b64enc
|
||||
from functools import reduce
|
||||
import time
|
||||
import random
|
||||
import urllib.parse
|
||||
import time
|
||||
from . import enc
|
||||
from base64 import urlsafe_b64encode as b64enc
|
||||
from urllib.parse import quote
|
||||
|
||||
'''
|
||||
Generate continuation parameter of youtube live chat.
|
||||
|
||||
Author: taizan-hokuto (2019) @taizan205
|
||||
def _header(video_id, channel_id) -> str:
|
||||
S1_3 = enc.rs(1, video_id)
|
||||
S1_5 = enc.rs(1, channel_id) + enc.rs(2, video_id)
|
||||
S1 = enc.rs(3, S1_3) + enc.rs(5, S1_5)
|
||||
S3 = enc.rs(48687757, enc.rs(1, video_id))
|
||||
header_replay = enc.rs(1, S1) + enc.rs(3, S3) + enc.nm(4, 1)
|
||||
return b64enc(header_replay)
|
||||
|
||||
ver 0.0.1 2019.10.05
|
||||
'''
|
||||
def _gen_vid(video_id):
|
||||
"""generate video_id parameter.
|
||||
Parameter
|
||||
---------
|
||||
video_id : str
|
||||
|
||||
Return
|
||||
---------
|
||||
byte[] : base64 encoded video_id parameter.
|
||||
"""
|
||||
header_magic = b'\x0A\x0F\x0A\x0D\x0A'
|
||||
header_id = video_id.encode()
|
||||
header_sep_1 = b'\x1A'
|
||||
header_sep_2 = b'\x43\xAA\xB9\xC1\xBD\x01\x3D\x0A'
|
||||
header_suburl = ('https://www.youtube.com/live_chat?v='
|
||||
f'{video_id}&is_popout=1').encode()
|
||||
header_terminator = b'\x20\x02'
|
||||
def _build(video_id, channel_id, ts1, ts2, ts3, ts4, ts5, topchat_only) -> str:
|
||||
chattype = 4 if topchat_only else 1
|
||||
|
||||
item = [
|
||||
header_magic,
|
||||
_nval(len(header_id)),
|
||||
header_id,
|
||||
header_sep_1,
|
||||
header_sep_2,
|
||||
_nval(len(header_suburl)),
|
||||
header_suburl,
|
||||
header_terminator
|
||||
]
|
||||
b1 = enc.nm(1, 0)
|
||||
b2 = enc.nm(2, 0)
|
||||
b3 = enc.nm(3, 0)
|
||||
b4 = enc.nm(4, 0)
|
||||
b7 = enc.rs(7, '')
|
||||
b8 = enc.nm(8, 0)
|
||||
b9 = enc.rs(9, '')
|
||||
timestamp2 = enc.nm(10, ts2)
|
||||
b11 = enc.nm(11, 3)
|
||||
b15 = enc.nm(15, 0)
|
||||
|
||||
return urllib.parse.quote(
|
||||
b64enc(reduce(lambda x, y: x+y, item)).decode()
|
||||
).encode()
|
||||
|
||||
def _tzparity(video_id,times):
|
||||
t=0
|
||||
for i,s in enumerate(video_id):
|
||||
ss = ord(s)
|
||||
if(ss % 2 == 0):
|
||||
t += ss*(12-i)
|
||||
else:
|
||||
t ^= ss*i
|
||||
|
||||
return ((times^t) % 2).to_bytes(1,'big')
|
||||
|
||||
def _nval(val):
|
||||
"""convert value to byte array"""
|
||||
if val<0: raise ValueError
|
||||
buf = b''
|
||||
while val >> 7:
|
||||
m = val & 0xFF | 0x80
|
||||
buf += m.to_bytes(1,'big')
|
||||
val >>= 7
|
||||
buf += val.to_bytes(1,'big')
|
||||
return buf
|
||||
|
||||
def _build(video_id, _ts1, _ts2, _ts3, _ts4, _ts5, topchat_only):
|
||||
#_short_type2
|
||||
switch_01 = b'\x04' if topchat_only else b'\x01'
|
||||
parity = _tzparity(video_id, _ts1^_ts2^_ts3^_ts4^_ts5)
|
||||
|
||||
header_magic= b'\xD2\x87\xCC\xC8\x03'
|
||||
sep_0 = b'\x1A'
|
||||
vid = _gen_vid(video_id)
|
||||
time_tag = b'\x28'
|
||||
timestamp1 = _nval(_ts1)
|
||||
sep_1 = b'\x30\x00\x38\x00\x40\x02\x4A'
|
||||
un_len = b'\x2B'
|
||||
sep_2 = b'\x08'+parity+b'\x10\x00\x18\x00\x20\x00'
|
||||
chkstr = b'\x2A\x0E\x73\x74\x61\x74\x69\x63\x63\x68\x65\x63\x6B\x73\x75\x6D'
|
||||
sep_3 = b'\x3A\x00\x40\x00\x4A'
|
||||
sep_4_len = b'\x02'
|
||||
sep_4 = b'\x08\x01'
|
||||
ts_2_start = b'\x50'
|
||||
timestamp2 = _nval(_ts2)
|
||||
ts_2_end = b'\x58'
|
||||
sep_5 = b'\x03'
|
||||
ts_3_start = b'\x50'
|
||||
timestamp3 = _nval(_ts3)
|
||||
ts_3_end = b'\x58'
|
||||
timestamp4 = _nval(_ts4)
|
||||
sep_6 = b'\x68'
|
||||
#switch
|
||||
sep_7 = b'\x82\x01\x04\x08'
|
||||
#switch
|
||||
sep_8 = b'\x10\x00'
|
||||
sep_9 = b'\x88\x01\x00\xA0\x01'
|
||||
timestamp5 = _nval(_ts5)
|
||||
|
||||
body = [
|
||||
sep_0,
|
||||
_nval(len(vid)),
|
||||
vid,
|
||||
time_tag,
|
||||
timestamp1,
|
||||
sep_1,
|
||||
un_len,
|
||||
sep_2,
|
||||
chkstr,
|
||||
sep_3,
|
||||
sep_4_len,
|
||||
sep_4,
|
||||
ts_2_start,
|
||||
timestamp2,
|
||||
ts_2_end,
|
||||
sep_5,
|
||||
ts_3_start,
|
||||
timestamp3,
|
||||
ts_3_end,
|
||||
timestamp4,
|
||||
sep_6,
|
||||
switch_01,#
|
||||
sep_7,
|
||||
switch_01,#
|
||||
sep_8,
|
||||
sep_9,
|
||||
timestamp5
|
||||
]
|
||||
|
||||
body = reduce(lambda x, y: x+y, body)
|
||||
|
||||
return urllib.parse.quote(
|
||||
b64enc( header_magic +
|
||||
_nval(len(body)) +
|
||||
body
|
||||
).decode()
|
||||
)
|
||||
header = enc.rs(3, _header(video_id, channel_id))
|
||||
timestamp1 = enc.nm(5, ts1)
|
||||
s6 = enc.nm(6, 0)
|
||||
s7 = enc.nm(7, 0)
|
||||
s8 = enc.nm(8, 1)
|
||||
body = enc.rs(9, b''.join(
|
||||
(b1, b2, b3, b4, b7, b8, b9, timestamp2, b11, b15)))
|
||||
timestamp3 = enc.nm(10, ts3)
|
||||
timestamp4 = enc.nm(11, ts4)
|
||||
s13 = enc.nm(13, chattype)
|
||||
chattype = enc.rs(16, enc.nm(1, chattype))
|
||||
s17 = enc.nm(17, 0)
|
||||
str19 = enc.rs(19, enc.nm(1, 0))
|
||||
timestamp5 = enc.nm(20, ts5)
|
||||
entity = b''.join((header, timestamp1, s6, s7, s8, body, timestamp3,
|
||||
timestamp4, s13, chattype, s17, str19, timestamp5))
|
||||
continuation = enc.rs(119693434, entity)
|
||||
return quote(b64enc(continuation).decode())
|
||||
|
||||
|
||||
def _times(past_sec):
|
||||
|
||||
n = int(time.time())
|
||||
|
||||
_ts1= n - random.uniform(0,1*3)
|
||||
_ts2= n - random.uniform(0.01,0.99)
|
||||
_ts3= n - past_sec + random.uniform(0,1)
|
||||
_ts4= n - random.uniform(10*60,60*60)
|
||||
_ts5= n - random.uniform(0.01,0.99)
|
||||
return list(map(lambda x:int(x*1000000),[_ts1,_ts2,_ts3,_ts4,_ts5]))
|
||||
_ts1 = n - random.uniform(0, 1 * 3)
|
||||
_ts2 = n - random.uniform(0.01, 0.99)
|
||||
_ts3 = n - past_sec + random.uniform(0, 1)
|
||||
_ts4 = n - random.uniform(10 * 60, 60 * 60)
|
||||
_ts5 = n - random.uniform(0.01, 0.99)
|
||||
return list(map(lambda x: int(x * 1000000), [_ts1, _ts2, _ts3, _ts4, _ts5]))
|
||||
|
||||
|
||||
def getparam(video_id, past_sec = 0, topchat_only = False):
|
||||
def getparam(video_id, channel_id, past_sec=0, topchat_only=False) -> str:
|
||||
'''
|
||||
Parameter
|
||||
---------
|
||||
@@ -160,5 +67,4 @@ def getparam(video_id, past_sec = 0, topchat_only = False):
|
||||
topchat_only : bool
|
||||
if True, fetch only 'top chat'
|
||||
'''
|
||||
return _build(video_id,*_times(past_sec),topchat_only)
|
||||
|
||||
return _build(video_id, channel_id, *_times(past_sec), topchat_only)
|
||||
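A sketch of building a live-chat continuation with the new liveparam.getparam() signature, which now takes the channel id explicitly; the module path and both ids are assumptions.

from pytchat.paramgen import liveparam  # module path is an assumption

continuation = liveparam.getparam(
    "VIDEO_ID",            # hypothetical video id
    "CHANNEL_ID",          # hypothetical channel id
    past_sec=3,            # start fetching from about 3 seconds in the past
    topchat_only=False)
print(continuation)        # URL-quoted, base64-encoded continuation token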
@@ -4,28 +4,36 @@ pytchat.parser.live
|
||||
Parser of live chat JSON.
|
||||
"""
|
||||
|
||||
import json
|
||||
from .. exceptions import (
|
||||
ResponseContextError,
|
||||
NoContentsException,
|
||||
NoContinuationsException,
|
||||
ChatParseException )
|
||||
from .. import exceptions
|
||||
|
||||
|
||||
class Parser:
|
||||
'''
|
||||
Parser of chat json.
|
||||
|
||||
__slots__ = ['is_replay']
|
||||
Parameter
|
||||
----------
|
||||
is_replay : bool
|
||||
|
||||
def __init__(self, is_replay):
|
||||
exception_holder : Object [default: None]
|
||||
The object holding exceptions.
|
||||
This is passed from the parent livechat object.
|
||||
'''
|
||||
__slots__ = ['is_replay', 'exception_holder']
|
||||
|
||||
def __init__(self, is_replay, exception_holder=None):
|
||||
self.is_replay = is_replay
|
||||
self.exception_holder = exception_holder
|
||||
|
||||
def get_contents(self, jsn):
|
||||
if jsn is None:
|
||||
raise ChatParseException('Called with none JSON object.')
|
||||
if jsn['response']['responseContext'].get('errors'):
|
||||
raise ResponseContextError('The video_id would be wrong,'
|
||||
'or video is deleted or private.')
|
||||
contents=jsn['response'].get('continuationContents')
|
||||
return contents
|
||||
self.raise_exception(exceptions.IllegalFunctionCall('Called with none JSON object.'))
|
||||
if jsn.get("responseContext", {}).get("errors"):
|
||||
raise exceptions.ResponseContextError(
|
||||
'The video_id would be wrong, or video is deleted or private.')
|
||||
contents = jsn.get('continuationContents')
|
||||
visitor_data = jsn.get("responseContext", {}).get("visitorData")
|
||||
return contents, visitor_data
|
||||
|
||||
def parse(self, contents):
|
||||
"""
|
||||
@@ -46,60 +54,69 @@ class Parser:
|
||||
|
||||
if contents is None:
|
||||
'''Broadcasting end or cannot fetch chat stream'''
|
||||
raise NoContentsException('Chat data stream is empty.')
|
||||
self.raise_exception(exceptions.NoContents('Chat data stream is empty.'))
|
||||
|
||||
cont = contents['liveChatContinuation']['continuations'][0]
|
||||
if cont is None:
|
||||
raise NoContinuationsException('No Continuation')
|
||||
metadata = (cont.get('invalidationContinuationData') or
|
||||
cont.get('timedContinuationData') or
|
||||
cont.get('reloadContinuationData') or
|
||||
cont.get('liveChatReplayContinuationData')
|
||||
self.raise_exception(exceptions.NoContinuation('No Continuation'))
|
||||
metadata = (cont.get('invalidationContinuationData')
|
||||
or cont.get('timedContinuationData')
|
||||
or cont.get('reloadContinuationData')
|
||||
or cont.get('liveChatReplayContinuationData')
|
||||
)
|
||||
if metadata is None:
|
||||
if cont.get("playerSeekContinuationData"):
|
||||
raise ChatParseException('Finished chat data')
|
||||
self.raise_exception(exceptions.ChatDataFinished('Finished chat data'))
|
||||
unknown = list(cont.keys())[0]
|
||||
if unknown:
|
||||
raise ChatParseException(f"Received unknown continuation type:{unknown}")
|
||||
self.raise_exception(exceptions.ReceivedUnknownContinuation(
|
||||
f"Received unknown continuation type:{unknown}"))
|
||||
else:
|
||||
raise ChatParseException('Cannot extract continuation data')
|
||||
self.raise_exception(exceptions.FailedExtractContinuation('Cannot extract continuation data'))
|
||||
return self._create_data(metadata, contents)
|
||||
|
||||
def reload_continuation(self, contents):
|
||||
"""
|
||||
When `seektime = 0` or seektime is abbreviated ,
When `seektime == 0` or seektime is omitted,
check if the fetched chat json has no chat data.
If so, try to fetch playerSeekContinuationData.
This function must be run only on the first fetch.
|
||||
"""
|
||||
if contents is None:
|
||||
'''Broadcasting end or cannot fetch chat stream'''
|
||||
self.raise_exception(exceptions.NoContents('Chat data stream is empty.'))
|
||||
cont = contents['liveChatContinuation']['continuations'][0]
|
||||
|
||||
if cont.get("liveChatReplayContinuationData"):
|
||||
#chat data exist.
|
||||
# chat data exist.
|
||||
return None
|
||||
#chat data do not exist, get playerSeekContinuationData.
|
||||
# chat data do not exist, get playerSeekContinuationData.
|
||||
init_cont = cont.get("playerSeekContinuationData")
|
||||
if init_cont:
|
||||
return init_cont.get("continuation")
|
||||
raise ChatParseException('Finished chat data')
|
||||
self.raise_exception(exceptions.ChatDataFinished('Finished chat data'))
|
||||
|
||||
def _create_data(self, metadata, contents):
|
||||
actions = contents['liveChatContinuation'].get('actions')
|
||||
if self.is_replay:
|
||||
interval = self._get_interval(actions)
|
||||
metadata.setdefault("timeoutMs",interval)
|
||||
last_offset_ms = self._get_lastoffset(actions)
|
||||
metadata.setdefault("timeoutMs", 5000)
|
||||
metadata.setdefault("last_offset_ms", last_offset_ms)
|
||||
"""Archived chat has different structures than live chat,
|
||||
so make it the same format."""
|
||||
chatdata = [action["replayChatItemAction"]["actions"][0]
|
||||
for action in actions]
|
||||
else:
|
||||
metadata.setdefault('timeoutMs', 10000)
|
||||
metadata.setdefault('timeoutMs', 5000)
|
||||
chatdata = actions
|
||||
return metadata, chatdata
|
||||
|
||||
def _get_interval(self, actions: list):
|
||||
if actions is None:
|
||||
def _get_lastoffset(self, actions: list):
|
||||
if actions:
|
||||
return int(actions[-1]["replayChatItemAction"]["videoOffsetTimeMsec"])
|
||||
return 0
|
||||
start = int(actions[0]["replayChatItemAction"]["videoOffsetTimeMsec"])
|
||||
last = int(actions[-1]["replayChatItemAction"]["videoOffsetTimeMsec"])
|
||||
return (last - start)
|
||||
|
||||
def raise_exception(self, exception):
|
||||
if self.exception_holder is None:
|
||||
raise exception
|
||||
self.exception_holder = exception
|
||||
|
||||
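A sketch of driving the Parser by hand, as the LiveChat classes do internally; the import path is an assumption and the payload is a minimal fake in the shape get_contents()/parse() expect.

from pytchat.parser.live import Parser  # import path is an assumption

# Minimal fake payload standing in for the dict returned by _get_livechat_json().
livechat_json = {
    "responseContext": {},
    "continuationContents": {
        "liveChatContinuation": {
            "continuations": [
                {"invalidationContinuationData": {"continuation": "XYZ", "timeoutMs": 5000}}
            ],
            "actions": [],
        }
    },
}

parser = Parser(is_replay=False)
contents, visitor_data = parser.get_contents(livechat_json)
metadata, chatdata = parser.parse(contents)
print(metadata.get("continuation"), metadata["timeoutMs"], chatdata)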
@@ -1,76 +0,0 @@
|
||||
import json
|
||||
from .. import config
|
||||
from .. exceptions import (
|
||||
ResponseContextError,
|
||||
NoContentsException,
|
||||
NoContinuationsException )
|
||||
|
||||
|
||||
logger = config.logger(__name__)
|
||||
|
||||
class Parser:
|
||||
def parse(self, jsn):
|
||||
"""
|
||||
このparse関数はReplayChat._listen() 関数から定期的に呼び出される。
|
||||
引数jsnはYoutubeから取得したアーカイブ済みチャットデータの生JSONであり、
|
||||
このparse関数によって与えられたJSONを以下に分割して返す。
|
||||
+ timeout (次のチャットデータ取得までのインターバル)
|
||||
+ chat data(チャットデータ本体)
|
||||
+ continuation (次のチャットデータ取得に必要となるパラメータ).
|
||||
|
||||
ライブ配信のチャットとアーカイブ済み動画のチャットは構造が若干異なっているが、
|
||||
ライブチャットと同じデータ形式に変換することにより、
|
||||
同じprocessorでライブとリプレイどちらでも利用できるようにしている。
|
||||
|
||||
Parameter
|
||||
----------
|
||||
+ jsn : dict
|
||||
+ Youtubeから取得したチャットデータのJSONオブジェクト。
|
||||
(pythonの辞書形式に変換済みの状態で渡される)
|
||||
|
||||
Returns
|
||||
-------
|
||||
+ metadata : dict
|
||||
+ チャットデータに付随するメタデータ。timeout、 動画ID、continuationパラメータで構成される。
|
||||
+ chatdata : list[dict]
|
||||
+ チャットデータ本体のリスト。
|
||||
"""
|
||||
if jsn is None:
|
||||
return {'timeoutMs':0,'continuation':None},[]
|
||||
if jsn['response']['responseContext'].get('errors'):
|
||||
raise ResponseContextError('動画に接続できません。'
|
||||
'動画IDが間違っているか、動画が削除/非公開の可能性があります。')
|
||||
contents=jsn['response'].get('continuationContents')
|
||||
#配信が終了した場合、もしくはチャットデータが取得できない場合
|
||||
if contents is None:
|
||||
raise NoContentsException('チャットデータを取得できませんでした。')
|
||||
|
||||
cont = contents['liveChatContinuation']['continuations'][0]
|
||||
if cont is None:
|
||||
raise NoContinuationsException('Continuationがありません。')
|
||||
metadata = cont.get('liveChatReplayContinuationData')
|
||||
if metadata is None:
|
||||
unknown = list(cont.keys())[0]
|
||||
if unknown != "playerSeekContinuationData":
|
||||
logger.debug(f"Received unknown continuation type:{unknown}")
|
||||
metadata = cont.get(unknown)
|
||||
actions = contents['liveChatContinuation'].get('actions')
|
||||
if actions is None:
|
||||
# No further chat data
|
||||
return {"continuation":None,"timeout":0,"chatdata":[]}
|
||||
interval = self.get_interval(actions)
|
||||
metadata.setdefault("timeoutMs",interval)
|
||||
"""アーカイブ済みチャットはライブチャットと構造が異なっているため、以下の行により
|
||||
converts it into the same format as live chat."""
|
||||
chatdata = [action["replayChatItemAction"]["actions"][0] for action in actions]
|
||||
return metadata, chatdata
|
||||
|
||||
def get_interval(self, actions: list):
|
||||
if actions is None:
|
||||
return 0
|
||||
start = int(actions[0]["replayChatItemAction"]["videoOffsetTimeMsec"])
|
||||
last = int(actions[-1]["replayChatItemAction"]["videoOffsetTimeMsec"])
|
||||
return (last - start)
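To illustrate how the values documented in parse() above (interval, chat data, continuation) are meant to be consumed, here is a hypothetical polling loop; fetch_chat_json and handle are assumed helper names, not part of the library.

import time

def poll_replay_chat(parser, first_continuation, fetch_chat_json, handle):
    # Sketch only: fetch_chat_json and handle are assumed helpers.
    continuation = first_continuation
    while continuation:
        jsn = fetch_chat_json(continuation)             # raw JSON fetched from YouTube
        metadata, chatdata = parser.parse(jsn)          # split into metadata and actions
        handle(chatdata)                                # hand the actions to a processor
        continuation = metadata.get("continuation")     # parameter for the next fetch
        time.sleep(metadata.get("timeoutMs", 5000) / 1000)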
|
||||
|
||||
|
||||
|
||||
@@ -3,6 +3,7 @@ class ChatProcessor:
|
||||
Abstract class that processes chat data.
|
||||
Receive chat data (actions) from Listener.
|
||||
'''
|
||||
|
||||
def process(self, chat_components: list):
|
||||
'''
|
||||
Interface that represents processing of chat data.
|
||||
@@ -21,7 +22,9 @@ class ChatProcessor:
|
||||
'''
|
||||
pass
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
def finalize(self, *args, **kwargs):
|
||||
'''
|
||||
Interface for finalizing the process.
|
||||
Called when chat fetching finished.
|
||||
'''
|
||||
pass
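To make the interface concrete, below is a minimal sketch (not part of the library) of a processor that only counts actions; it assumes the component shape ('timeout' / 'chatdata') used by the bundled processors.

class CountingProcessor(ChatProcessor):
    # Minimal sketch of a custom processor: counts raw actions passing through.
    def __init__(self):
        self.total = 0

    def process(self, chat_components: list):
        if not chat_components:
            return self.total
        for component in chat_components:
            chatdata = component.get('chatdata') or []
            self.total += sum(1 for action in chatdata if action)
        return self.total

    def finalize(self, *args, **kwargs):
        return self.total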
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
from .chat_processor import ChatProcessor
|
||||
|
||||
|
||||
class Combinator(ChatProcessor):
|
||||
'''
|
||||
Combinator combines multiple chat processors.
|
||||
@@ -36,4 +37,6 @@ class Combinator(ChatProcessor):
|
||||
return tuple(processor.process(chat_components)
|
||||
for processor in self.processors)
|
||||
|
||||
|
||||
def finalize(self, *args, **kwargs):
|
||||
[processor.finalize(*args, **kwargs)
|
||||
for processor in self.processors]
|
||||
|
||||
@@ -1,20 +1,20 @@
|
||||
import datetime
|
||||
import time
|
||||
from .renderer.textmessage import LiveChatTextMessageRenderer
|
||||
from .renderer.paidmessage import LiveChatPaidMessageRenderer
|
||||
from .renderer.paidsticker import LiveChatPaidStickerRenderer
|
||||
from .renderer.legacypaid import LiveChatLegacyPaidMessageRenderer
|
||||
from .renderer.membership import LiveChatMembershipItemRenderer
|
||||
from .. chat_processor import ChatProcessor
|
||||
from ... import config
|
||||
logger = config.logger(__name__)
|
||||
|
||||
|
||||
class CompatibleProcessor(ChatProcessor):
|
||||
|
||||
def process(self, chat_components: list):
|
||||
|
||||
chatlist = []
|
||||
timeout = 0
|
||||
ret={}
|
||||
ret = {}
|
||||
ret["kind"] = "youtube#liveChatMessageListResponse"
|
||||
ret["etag"] = ""
|
||||
ret["nextPageToken"] = ""
|
||||
@@ -24,19 +24,23 @@ class CompatibleProcessor(ChatProcessor):
|
||||
timeout += chat_component.get('timeout', 0)
|
||||
chatdata = chat_component.get('chatdata')
|
||||
|
||||
if chatdata is None: break
|
||||
if chatdata is None:
|
||||
break
|
||||
for action in chatdata:
|
||||
if action is None: continue
|
||||
if action.get('addChatItemAction') is None: continue
|
||||
if action['addChatItemAction'].get('item') is None: continue
|
||||
if action is None:
|
||||
continue
|
||||
if action.get('addChatItemAction') is None:
|
||||
continue
|
||||
if action['addChatItemAction'].get('item') is None:
|
||||
continue
|
||||
|
||||
chat = self.parse(action)
|
||||
if chat:
|
||||
chatlist.append(chat)
|
||||
ret["pollingIntervalMillis"] = int(timeout*1000)
|
||||
ret["pageInfo"]={
|
||||
"totalResults":len(chatlist),
|
||||
"resultsPerPage":len(chatlist),
|
||||
ret["pollingIntervalMillis"] = int(timeout * 1000)
|
||||
ret["pageInfo"] = {
|
||||
"totalResults": len(chatlist),
|
||||
"resultsPerPage": len(chatlist),
|
||||
}
|
||||
ret["items"] = chatlist
|
||||
|
||||
@@ -47,11 +51,12 @@ class CompatibleProcessor(ChatProcessor):
|
||||
action = sitem.get("addChatItemAction")
|
||||
if action:
|
||||
item = action.get("item")
|
||||
if item is None: return None
|
||||
rd={}
|
||||
if item is None:
|
||||
return None
|
||||
rd = {}
|
||||
try:
|
||||
renderer = self.get_renderer(item)
|
||||
if renderer == None:
|
||||
if renderer is None:
|
||||
return None
|
||||
|
||||
rd["kind"] = "youtube#liveChatMessage"
|
||||
@@ -59,7 +64,7 @@ class CompatibleProcessor(ChatProcessor):
|
||||
rd["id"] = 'LCC.' + renderer.get_id()
|
||||
rd["snippet"] = renderer.get_snippet()
|
||||
rd["authorDetails"] = renderer.get_authordetails()
|
||||
except (KeyError,TypeError,AttributeError) as e:
|
||||
except (KeyError, TypeError, AttributeError) as e:
|
||||
logger.error(f"Error: {str(type(e))}-{str(e)}")
|
||||
logger.error(f"item: {sitem}")
|
||||
return None
|
||||
@@ -71,11 +76,12 @@ class CompatibleProcessor(ChatProcessor):
|
||||
renderer = LiveChatTextMessageRenderer(item)
|
||||
elif item.get("liveChatPaidMessageRenderer"):
|
||||
renderer = LiveChatPaidMessageRenderer(item)
|
||||
elif item.get( "liveChatPaidStickerRenderer"):
|
||||
elif item.get("liveChatPaidStickerRenderer"):
|
||||
renderer = LiveChatPaidStickerRenderer(item)
|
||||
elif item.get("liveChatLegacyPaidMessageRenderer"):
|
||||
renderer = LiveChatLegacyPaidMessageRenderer(item)
|
||||
elif item.get("liveChatMembershipItemRenderer"):
|
||||
renderer = LiveChatMembershipItemRenderer(item)
|
||||
else:
|
||||
renderer = None
|
||||
return renderer
|
||||
|
||||
|
||||
@@ -1,83 +1,82 @@
|
||||
import datetime, pytz
|
||||
from datetime import datetime, timedelta, timezone
|
||||
|
||||
TZ_UTC = timezone(timedelta(0), 'UTC')
|
||||
|
||||
|
||||
class BaseRenderer:
|
||||
def __init__(self, item, chattype):
|
||||
self.renderer = list(item.values())[0]
|
||||
self.chattype = chattype
|
||||
|
||||
|
||||
def get_snippet(self):
|
||||
|
||||
message = self.get_message(self.renderer)
|
||||
|
||||
return {
|
||||
"type" : self.chattype,
|
||||
"liveChatId" : "",
|
||||
"authorChannelId" : self.renderer.get("authorExternalChannelId"),
|
||||
"publishedAt" : self.get_publishedat(self.renderer.get("timestampUsec",0)),
|
||||
"hasDisplayContent" : True,
|
||||
"displayMessage" : message,
|
||||
"type": self.chattype,
|
||||
"liveChatId": "",
|
||||
"authorChannelId": self.renderer.get("authorExternalChannelId"),
|
||||
"publishedAt": self.get_publishedat(self.renderer.get("timestampUsec", 0)),
|
||||
"hasDisplayContent": True,
|
||||
"displayMessage": message,
|
||||
"textMessageDetails": {
|
||||
"messageText" : message
|
||||
"messageText": message
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
def get_authordetails(self):
|
||||
authorExternalChannelId = self.renderer.get("authorExternalChannelId")
|
||||
#parse subscriber type
|
||||
# parse subscriber type
|
||||
isVerified, isChatOwner, isChatSponsor, isChatModerator = (
|
||||
self.get_badges(self.renderer)
|
||||
)
|
||||
return {
|
||||
"channelId" : authorExternalChannelId,
|
||||
"channelUrl" : "http://www.youtube.com/channel/"+authorExternalChannelId,
|
||||
"displayName" : self.renderer["authorName"]["simpleText"],
|
||||
"profileImageUrl" : self.renderer["authorPhoto"]["thumbnails"][1]["url"] ,
|
||||
"isVerified" : isVerified,
|
||||
"isChatOwner" : isChatOwner,
|
||||
"isChatSponsor" : isChatSponsor,
|
||||
"isChatModerator" : isChatModerator
|
||||
"channelId": authorExternalChannelId,
|
||||
"channelUrl": "http://www.youtube.com/channel/" + authorExternalChannelId,
|
||||
"displayName": self.renderer["authorName"]["simpleText"],
|
||||
"profileImageUrl": self.renderer["authorPhoto"]["thumbnails"][1]["url"],
|
||||
"isVerified": isVerified,
|
||||
"isChatOwner": isChatOwner,
|
||||
"isChatSponsor": isChatSponsor,
|
||||
"isChatModerator": isChatModerator
|
||||
}
|
||||
|
||||
|
||||
def get_message(self,renderer):
|
||||
def get_message(self, renderer):
|
||||
message = ''
|
||||
if renderer.get("message"):
|
||||
runs=renderer["message"].get("runs")
|
||||
runs = renderer["message"].get("runs")
|
||||
if runs:
|
||||
for r in runs:
|
||||
if r:
|
||||
if r.get('emoji'):
|
||||
message += r['emoji'].get('shortcuts',[''])[0]
|
||||
message += r['emoji'].get('shortcuts', [''])[0]
|
||||
else:
|
||||
message += r.get('text','')
|
||||
message += r.get('text', '')
|
||||
return message
|
||||
|
||||
def get_badges(self,renderer):
|
||||
def get_badges(self, renderer):
|
||||
isVerified = False
|
||||
isChatOwner = False
|
||||
isChatSponsor = False
|
||||
isChatModerator = False
|
||||
badges=renderer.get("authorBadges")
|
||||
badges = renderer.get("authorBadges")
|
||||
if badges:
|
||||
for badge in badges:
|
||||
author_type = badge["liveChatAuthorBadgeRenderer"]["accessibility"]["accessibilityData"]["label"]
|
||||
if author_type == '確認済み':
|
||||
if author_type == 'VERIFIED' or author_type == '確認済み':
|
||||
isVerified = True
|
||||
if author_type == '所有者':
|
||||
if author_type == 'OWNER' or author_type == '所有者':
|
||||
isChatOwner = True
|
||||
if 'メンバー' in author_type:
|
||||
if 'メンバー' in author_type or 'MEMBER' in author_type:
|
||||
isChatSponsor = True
|
||||
if author_type == 'モデレーター':
|
||||
if author_type == 'MODERATOR' or author_type == 'モデレーター':
|
||||
isChatModerator = True
|
||||
return isVerified, isChatOwner, isChatSponsor, isChatModerator
|
||||
|
||||
def get_id(self):
|
||||
return self.renderer.get('id')
|
||||
|
||||
def get_publishedat(self,timestamp):
|
||||
dt = datetime.datetime.fromtimestamp(int(timestamp)/1000000)
|
||||
return dt.astimezone(pytz.utc).isoformat(
|
||||
timespec='milliseconds').replace('+00:00','Z')
|
||||
|
||||
def get_publishedat(self, timestamp):
|
||||
dt = datetime.fromtimestamp(int(timestamp) / 1000000)
|
||||
return dt.astimezone(TZ_UTC).isoformat(
|
||||
timespec='milliseconds').replace('+00:00', 'Z')
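A quick worked example of the conversion above, assuming a timestampUsec of 1577836800000000 (2020-01-01 00:00:00 UTC):

from datetime import datetime, timedelta, timezone

TZ_UTC = timezone(timedelta(0), 'UTC')
usec = 1577836800000000                       # timestampUsec taken from the renderer
dt = datetime.fromtimestamp(usec / 1000000)   # microseconds -> seconds since the epoch
print(dt.astimezone(TZ_UTC).isoformat(timespec='milliseconds').replace('+00:00', 'Z'))
# -> 2020-01-01T00:00:00.000Z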
|
||||
|
||||
@@ -1,12 +1,12 @@
|
||||
'''
|
||||
Mapping between the currency symbols used in YouTube Super Chat and the
abbreviations used for rate lookup.
|
||||
Table of symbols for the currencies used in YouTube Superchat.
|
||||
|
||||
Key:
|
||||
YouTubeスーパーチャットで使用される通貨の記号
|
||||
(アルファベットで終わる場合、0xA0(&npsp)が付く)
|
||||
Currency symbols used in YouTube Super Chat
|
||||
If it ends with a letter, it is followed by 0xA0 (&nbsp;).
|
||||
Value:
|
||||
fxtext: 3文字の通貨略称
|
||||
jptest: 日本語テキスト
|
||||
fxtext: ISO 4217 currency code
|
||||
jptext: Japanese text
|
||||
'''
|
||||
symbols = {
|
||||
"$": {"fxtext": "USD", "jptext": "米・ドル"},
|
||||
|
||||
@@ -1,4 +1,6 @@
|
||||
from .base import BaseRenderer
|
||||
|
||||
|
||||
class LiveChatLegacyPaidMessageRenderer(BaseRenderer):
|
||||
def __init__(self, item):
|
||||
super().__init__(item, "newSponsorEvent")
|
||||
@@ -8,36 +10,33 @@ class LiveChatLegacyPaidMessageRenderer(BaseRenderer):
|
||||
message = self.get_message(self.renderer)
|
||||
|
||||
return {
|
||||
"type" : self.chattype,
|
||||
"liveChatId" : "",
|
||||
"authorChannelId" : self.renderer.get("authorExternalChannelId"),
|
||||
"publishedAt" : self.get_publishedat(self.renderer.get("timestampUsec",0)),
|
||||
"hasDisplayContent" : True,
|
||||
"displayMessage" : message,
|
||||
"type": self.chattype,
|
||||
"liveChatId": "",
|
||||
"authorChannelId": self.renderer.get("authorExternalChannelId"),
|
||||
"publishedAt": self.get_publishedat(self.renderer.get("timestampUsec", 0)),
|
||||
"hasDisplayContent": True,
|
||||
"displayMessage": message,
|
||||
|
||||
}
|
||||
|
||||
def get_authordetails(self):
|
||||
authorExternalChannelId = self.renderer.get("authorExternalChannelId")
|
||||
#parse subscriber type
|
||||
# parse subscriber type
|
||||
isVerified, isChatOwner, _, isChatModerator = (
|
||||
self.get_badges(self.renderer)
|
||||
)
|
||||
return {
|
||||
"channelId" : authorExternalChannelId,
|
||||
"channelUrl" : "http://www.youtube.com/channel/"+authorExternalChannelId,
|
||||
"displayName" : self.renderer["authorName"]["simpleText"],
|
||||
"profileImageUrl" : self.renderer["authorPhoto"]["thumbnails"][1]["url"] ,
|
||||
"isVerified" : isVerified,
|
||||
"isChatOwner" : isChatOwner,
|
||||
"isChatSponsor" : True,
|
||||
"isChatModerator" : isChatModerator
|
||||
"channelId": authorExternalChannelId,
|
||||
"channelUrl": "http://www.youtube.com/channel/" + authorExternalChannelId,
|
||||
"displayName": self.renderer["authorName"]["simpleText"],
|
||||
"profileImageUrl": self.renderer["authorPhoto"]["thumbnails"][1]["url"],
|
||||
"isVerified": isVerified,
|
||||
"isChatOwner": isChatOwner,
|
||||
"isChatSponsor": True,
|
||||
"isChatModerator": isChatModerator
|
||||
}
|
||||
|
||||
|
||||
def get_message(self,renderer):
|
||||
def get_message(self, renderer):
|
||||
message = (renderer["eventText"]["runs"][0]["text"]
|
||||
)+' / '+(renderer["detailText"]["simpleText"])
|
||||
) + ' / ' + (renderer["detailText"]["simpleText"])
|
||||
return message
|
||||
|
||||
|
||||
|
||||
pytchat/processors/compatible/renderer/membership.py (Normal file, 40 lines)
@@ -0,0 +1,40 @@
|
||||
from .base import BaseRenderer
|
||||
|
||||
|
||||
class LiveChatMembershipItemRenderer(BaseRenderer):
|
||||
def __init__(self, item):
|
||||
super().__init__(item, "newSponsorEvent")
|
||||
|
||||
def get_snippet(self):
|
||||
message = self.get_message(self.renderer)
|
||||
return {
|
||||
"type": self.chattype,
|
||||
"liveChatId": "",
|
||||
"authorChannelId": self.renderer.get("authorExternalChannelId"),
|
||||
"publishedAt": self.get_publishedat(self.renderer.get("timestampUsec", 0)),
|
||||
"hasDisplayContent": True,
|
||||
"displayMessage": message,
|
||||
|
||||
}
|
||||
|
||||
def get_authordetails(self):
|
||||
authorExternalChannelId = self.renderer.get("authorExternalChannelId")
|
||||
# parse subscriber type
|
||||
isVerified, isChatOwner, _, isChatModerator = (
|
||||
self.get_badges(self.renderer)
|
||||
)
|
||||
return {
|
||||
"channelId": authorExternalChannelId,
|
||||
"channelUrl": "http://www.youtube.com/channel/" + authorExternalChannelId,
|
||||
"displayName": self.renderer["authorName"]["simpleText"],
|
||||
"profileImageUrl": self.renderer["authorPhoto"]["thumbnails"][1]["url"],
|
||||
"isVerified": isVerified,
|
||||
"isChatOwner": isChatOwner,
|
||||
"isChatSponsor": True,
|
||||
"isChatModerator": isChatModerator
|
||||
}
|
||||
|
||||
def get_message(self, renderer):
|
||||
message = ''.join([mes.get("text", "")
|
||||
for mes in renderer["headerSubtext"]["runs"]])
|
||||
return message, [message]
|
||||
@@ -3,6 +3,7 @@ from . import currency
|
||||
from .base import BaseRenderer
|
||||
superchat_regex = re.compile(r"^(\D*)(\d{1,3}(,\d{3})*(\.\d*)*\b)$")
|
||||
|
||||
|
||||
class LiveChatPaidMessageRenderer(BaseRenderer):
|
||||
def __init__(self, item):
|
||||
super().__init__(item, "superChatEvent")
|
||||
@@ -10,31 +11,31 @@ class LiveChatPaidMessageRenderer(BaseRenderer):
|
||||
def get_snippet(self):
|
||||
authorName = self.renderer["authorName"]["simpleText"]
|
||||
message = self.get_message(self.renderer)
|
||||
amountDisplayString, symbol, amountMicros =(
|
||||
amountDisplayString, symbol, amountMicros = (
|
||||
self.get_amountdata(self.renderer)
|
||||
)
|
||||
return {
|
||||
"type" : self.chattype,
|
||||
"liveChatId" : "",
|
||||
"authorChannelId" : self.renderer.get("authorExternalChannelId"),
|
||||
"publishedAt" : self.get_publishedat(self.renderer.get("timestampUsec",0)),
|
||||
"hasDisplayContent" : True,
|
||||
"displayMessage" : amountDisplayString+" from "+authorName+': \"'+ message+'\"',
|
||||
"superChatDetails" : {
|
||||
"amountMicros" : amountMicros,
|
||||
"currency" : currency.symbols[symbol]["fxtext"] if currency.symbols.get(symbol) else symbol,
|
||||
"amountDisplayString" : amountDisplayString,
|
||||
"tier" : 0,
|
||||
"backgroundColor" : self.renderer.get("bodyBackgroundColor", 0)
|
||||
"type": self.chattype,
|
||||
"liveChatId": "",
|
||||
"authorChannelId": self.renderer.get("authorExternalChannelId"),
|
||||
"publishedAt": self.get_publishedat(self.renderer.get("timestampUsec", 0)),
|
||||
"hasDisplayContent": True,
|
||||
"displayMessage": amountDisplayString + " from " + authorName + ': \"' + message + '\"',
|
||||
"superChatDetails": {
|
||||
"amountMicros": amountMicros,
|
||||
"currency": currency.symbols[symbol]["fxtext"] if currency.symbols.get(symbol) else symbol,
|
||||
"amountDisplayString": amountDisplayString,
|
||||
"tier": 0,
|
||||
"backgroundColor": self.renderer.get("bodyBackgroundColor", 0)
|
||||
}
|
||||
}
|
||||
|
||||
def get_amountdata(self,renderer):
|
||||
def get_amountdata(self, renderer):
|
||||
amountDisplayString = renderer["purchaseAmountText"]["simpleText"]
|
||||
m = superchat_regex.search(amountDisplayString)
|
||||
if m:
|
||||
symbol = m.group(1)
|
||||
amountMicros = int(float(m.group(2).replace(',',''))*1000000)
|
||||
amountMicros = int(float(m.group(2).replace(',', '')) * 1000000)
|
||||
else:
|
||||
symbol = ""
|
||||
amountMicros = 0
|
||||
|
||||
@@ -3,46 +3,45 @@ from . import currency
|
||||
from .base import BaseRenderer
|
||||
superchat_regex = re.compile(r"^(\D*)(\d{1,3}(,\d{3})*(\.\d*)*\b)$")
|
||||
|
||||
|
||||
class LiveChatPaidStickerRenderer(BaseRenderer):
|
||||
def __init__(self, item):
|
||||
super().__init__(item, "superStickerEvent")
|
||||
|
||||
def get_snippet(self):
|
||||
authorName = self.renderer["authorName"]["simpleText"]
|
||||
amountDisplayString, symbol, amountMicros =(
|
||||
amountDisplayString, symbol, amountMicros = (
|
||||
self.get_amountdata(self.renderer)
|
||||
)
|
||||
|
||||
return {
|
||||
"type" : self.chattype,
|
||||
"liveChatId" : "",
|
||||
"authorChannelId" : self.renderer.get("authorExternalChannelId"),
|
||||
"publishedAt" : self.get_publishedat(self.renderer.get("timestampUsec",0)),
|
||||
"hasDisplayContent" : True,
|
||||
"displayMessage" : "Super Sticker " + amountDisplayString + " from "+authorName,
|
||||
"superStickerDetails" : {
|
||||
"superStickerMetaData" : {
|
||||
"type": self.chattype,
|
||||
"liveChatId": "",
|
||||
"authorChannelId": self.renderer.get("authorExternalChannelId"),
|
||||
"publishedAt": self.get_publishedat(self.renderer.get("timestampUsec", 0)),
|
||||
"hasDisplayContent": True,
|
||||
"displayMessage": "Super Sticker " + amountDisplayString + " from " + authorName,
|
||||
"superStickerDetails": {
|
||||
"superStickerMetaData": {
|
||||
"stickerId": "",
|
||||
"altText": "",
|
||||
"language": ""
|
||||
},
|
||||
"amountMicros" : amountMicros,
|
||||
"currency" : currency.symbols[symbol]["fxtext"] if currency.symbols.get(symbol) else symbol,
|
||||
"amountDisplayString" : amountDisplayString,
|
||||
"tier" : 0,
|
||||
"backgroundColor" : self.renderer.get("bodyBackgroundColor", 0)
|
||||
"amountMicros": amountMicros,
|
||||
"currency": currency.symbols[symbol]["fxtext"] if currency.symbols.get(symbol) else symbol,
|
||||
"amountDisplayString": amountDisplayString,
|
||||
"tier": 0,
|
||||
"backgroundColor": self.renderer.get("bodyBackgroundColor", 0)
|
||||
}
|
||||
}
|
||||
|
||||
def get_amountdata(self,renderer):
|
||||
def get_amountdata(self, renderer):
|
||||
amountDisplayString = renderer["purchaseAmountText"]["simpleText"]
|
||||
m = superchat_regex.search(amountDisplayString)
|
||||
if m:
|
||||
symbol = m.group(1)
|
||||
amountMicros = int(float(m.group(2).replace(',',''))*1000000)
|
||||
amountMicros = int(float(m.group(2).replace(',', '')) * 1000000)
|
||||
else:
|
||||
symbol = ""
|
||||
amountMicros = 0
|
||||
return amountDisplayString, symbol, amountMicros
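As a worked example of how superchat_regex and get_amountdata split a purchase amount string, and of the currency lookup used in get_snippet (illustrative values, not taken from real chat data):

import re

superchat_regex = re.compile(r"^(\D*)(\d{1,3}(,\d{3})*(\.\d*)*\b)$")

m = superchat_regex.search("$5.00")
symbol = m.group(1)                                                # '$'
amountMicros = int(float(m.group(2).replace(',', '')) * 1000000)   # 5000000
fxtext = currency.symbols[symbol]["fxtext"] if currency.symbols.get(symbol) else symbol   # 'USD'

m = superchat_regex.search("¥1,000")
amountMicros = int(float(m.group(2).replace(',', '')) * 1000000)   # 1000000000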
|
||||
|
||||
|
||||
|
||||
@@ -1,4 +1,6 @@
|
||||
from .base import BaseRenderer
|
||||
|
||||
|
||||
class LiveChatTextMessageRenderer(BaseRenderer):
|
||||
def __init__(self, item):
|
||||
super().__init__(item, "textMessageEvent")
|
||||
|
||||
pytchat/processors/default/custom_encoder.py (Normal file, 11 lines)
@@ -0,0 +1,11 @@
|
||||
import json
|
||||
from .renderer.base import Author
|
||||
from .renderer.paidmessage import Colors
|
||||
from .renderer.paidsticker import Colors2
|
||||
|
||||
|
||||
class CustomEncoder(json.JSONEncoder):
|
||||
def default(self, obj):
|
||||
if isinstance(obj, Author) or isinstance(obj, Colors) or isinstance(obj, Colors2):
|
||||
return vars(obj)
|
||||
return json.JSONEncoder.default(self, obj)
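A minimal illustration (assumed usage, not from the repository) of what the encoder enables: plain attribute holders such as Author are serialized via vars().

author = Author()
author.name = "someone"
author.isChatOwner = False
print(json.dumps({"author": author}, cls=CustomEncoder, ensure_ascii=False))
# -> {"author": {"name": "someone", "isChatOwner": false}}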
|
||||
@@ -1,31 +1,132 @@
|
||||
import asyncio
|
||||
import json
|
||||
import time
|
||||
from .custom_encoder import CustomEncoder
|
||||
from .renderer.textmessage import LiveChatTextMessageRenderer
|
||||
from .renderer.paidmessage import LiveChatPaidMessageRenderer
|
||||
from .renderer.paidsticker import LiveChatPaidStickerRenderer
|
||||
from .renderer.legacypaid import LiveChatLegacyPaidMessageRenderer
|
||||
from .renderer.membership import LiveChatMembershipItemRenderer
|
||||
from .. chat_processor import ChatProcessor
|
||||
from ... import config
|
||||
|
||||
logger = config.logger(__name__)
|
||||
|
||||
|
||||
class Chat:
|
||||
def json(self) -> str:
|
||||
return json.dumps(vars(self), ensure_ascii=False, cls=CustomEncoder)
|
||||
|
||||
|
||||
class Chatdata:
|
||||
def __init__(self,chatlist:list, timeout:float):
|
||||
|
||||
def __init__(self, chatlist: list, timeout: float, abs_diff):
|
||||
self.items = chatlist
|
||||
self.interval = timeout
|
||||
self.abs_diff = abs_diff
|
||||
self.itemcount = 0
|
||||
|
||||
def tick(self):
|
||||
if self.interval == 0:
|
||||
'''DEPRECATED
|
||||
Use sync_items()
|
||||
'''
|
||||
if len(self.items) < 1:
|
||||
time.sleep(1)
|
||||
return
|
||||
time.sleep(self.interval/len(self.items))
|
||||
if self.itemcount == 0:
|
||||
self.starttime = time.time()
|
||||
if len(self.items) == 1:
|
||||
total_itemcount = 1
|
||||
else:
|
||||
total_itemcount = len(self.items) - 1
|
||||
next_chattime = (self.items[0].timestamp + (self.items[-1].timestamp - self.items[0].timestamp) / total_itemcount * self.itemcount) / 1000
|
||||
tobe_disptime = self.abs_diff + next_chattime
|
||||
wait_sec = tobe_disptime - time.time()
|
||||
self.itemcount += 1
|
||||
|
||||
if wait_sec < 0:
|
||||
wait_sec = 0
|
||||
|
||||
time.sleep(wait_sec)
|
||||
|
||||
async def tick_async(self):
|
||||
if self.interval == 0:
|
||||
'''DEPRECATED
|
||||
Use async_items()
|
||||
'''
|
||||
if len(self.items) < 1:
|
||||
await asyncio.sleep(1)
|
||||
return
|
||||
await asyncio.sleep(self.interval/len(self.items))
|
||||
if self.itemcount == 0:
|
||||
self.starttime = time.time()
|
||||
if len(self.items) == 1:
|
||||
total_itemcount = 1
|
||||
else:
|
||||
total_itemcount = len(self.items) - 1
|
||||
next_chattime = (self.items[0].timestamp + (self.items[-1].timestamp - self.items[0].timestamp) / total_itemcount * self.itemcount) / 1000
|
||||
tobe_disptime = self.abs_diff + next_chattime
|
||||
wait_sec = tobe_disptime - time.time()
|
||||
self.itemcount += 1
|
||||
|
||||
if wait_sec < 0:
|
||||
wait_sec = 0
|
||||
|
||||
await asyncio.sleep(wait_sec)
|
||||
|
||||
def sync_items(self):
|
||||
starttime = time.time()
|
||||
if len(self.items) > 0:
|
||||
last_chattime = self.items[-1].timestamp / 1000
|
||||
tobe_disptime = self.abs_diff + last_chattime
|
||||
wait_total_sec = max(tobe_disptime - time.time(), 0)
|
||||
if len(self.items) > 1:
|
||||
wait_sec = wait_total_sec / len(self.items)
|
||||
elif len(self.items) == 1:
|
||||
wait_sec = 0
|
||||
for c in self.items:
|
||||
if wait_sec < 0:
|
||||
wait_sec = 0
|
||||
time.sleep(wait_sec)
|
||||
yield c
|
||||
stop_interval = time.time() - starttime
|
||||
if stop_interval < 1:
|
||||
time.sleep(1 - stop_interval)
|
||||
|
||||
async def async_items(self):
|
||||
starttime = time.time()
|
||||
if len(self.items) > 0:
|
||||
last_chattime = self.items[-1].timestamp / 1000
|
||||
tobe_disptime = self.abs_diff + last_chattime
|
||||
wait_total_sec = max(tobe_disptime - time.time(), 0)
|
||||
if len(self.items) > 1:
|
||||
wait_sec = wait_total_sec / len(self.items)
|
||||
elif len(self.items) == 1:
|
||||
wait_sec = 0
|
||||
for c in self.items:
|
||||
if wait_sec < 0:
|
||||
wait_sec = 0
|
||||
await asyncio.sleep(wait_sec)
|
||||
yield c
|
||||
|
||||
stop_interval = time.time() - starttime
|
||||
if stop_interval < 1:
|
||||
await asyncio.sleep(1 - stop_interval)
|
||||
|
||||
def json(self) -> str:
|
||||
return ''.join(("[", ','.join((a.json() for a in self.items)), "]"))
|
||||
|
||||
|
||||
class DefaultProcessor(ChatProcessor):
|
||||
def __init__(self):
|
||||
self.first = True
|
||||
self.abs_diff = 0
|
||||
self.renderers = {
|
||||
"liveChatTextMessageRenderer": LiveChatTextMessageRenderer(),
|
||||
"liveChatPaidMessageRenderer": LiveChatPaidMessageRenderer(),
|
||||
"liveChatPaidStickerRenderer": LiveChatPaidStickerRenderer(),
|
||||
"liveChatLegacyPaidMessageRenderer": LiveChatLegacyPaidMessageRenderer(),
|
||||
"liveChatMembershipItemRenderer": LiveChatMembershipItemRenderer()
|
||||
}
|
||||
|
||||
def process(self, chat_components: list):
|
||||
|
||||
chatlist = []
|
||||
@@ -33,47 +134,46 @@ class DefaultProcessor(ChatProcessor):
|
||||
|
||||
if chat_components:
|
||||
for component in chat_components:
|
||||
if component is None:
|
||||
continue
|
||||
timeout += component.get('timeout', 0)
|
||||
chatdata = component.get('chatdata')
|
||||
if chatdata is None: continue
|
||||
chatdata = component.get('chatdata') # if from Extractor, chatdata is generator.
|
||||
if chatdata is None:
|
||||
continue
|
||||
for action in chatdata:
|
||||
if action is None: continue
|
||||
if action.get('addChatItemAction') is None: continue
|
||||
if action['addChatItemAction'].get('item') is None: continue
|
||||
|
||||
chat = self._parse(action)
|
||||
if action is None:
|
||||
continue
|
||||
if action.get('addChatItemAction') is None:
|
||||
continue
|
||||
item = action['addChatItemAction'].get('item')
|
||||
if item is None:
|
||||
continue
|
||||
chat = self._parse(item)
|
||||
if chat:
|
||||
chatlist.append(chat)
|
||||
return Chatdata(chatlist, float(timeout))
|
||||
|
||||
if self.first and chatlist:
|
||||
self.abs_diff = time.time() - chatlist[0].timestamp / 1000
|
||||
self.first = False
|
||||
|
||||
def _parse(self, sitem):
|
||||
chatdata = Chatdata(chatlist, float(timeout), self.abs_diff)
|
||||
|
||||
action = sitem.get("addChatItemAction")
|
||||
if action:
|
||||
item = action.get("item")
|
||||
if item is None: return None
|
||||
return chatdata
|
||||
|
||||
def _parse(self, item):
|
||||
try:
|
||||
renderer = self._get_renderer(item)
|
||||
if renderer == None:
|
||||
key = list(item.keys())[0]
|
||||
renderer = self.renderers.get(key)
|
||||
if renderer is None:
|
||||
return None
|
||||
|
||||
renderer.setitem(item.get(key), Chat())
|
||||
renderer.settype()
|
||||
renderer.get_snippet()
|
||||
renderer.get_authordetails()
|
||||
except (KeyError,TypeError) as e:
|
||||
logger.error(f"{str(type(e))}-{str(e)} sitem:{str(sitem)}")
|
||||
rendered_chatobj = renderer.get_chatobj()
|
||||
renderer.clear()
|
||||
except (KeyError, TypeError) as e:
|
||||
logger.error(f"{str(type(e))}-{str(e)} item:{str(item)}")
|
||||
return None
|
||||
return renderer
|
||||
|
||||
def _get_renderer(self, item):
|
||||
if item.get("liveChatTextMessageRenderer"):
|
||||
renderer = LiveChatTextMessageRenderer(item)
|
||||
elif item.get("liveChatPaidMessageRenderer"):
|
||||
renderer = LiveChatPaidMessageRenderer(item)
|
||||
elif item.get( "liveChatPaidStickerRenderer"):
|
||||
renderer = LiveChatPaidStickerRenderer(item)
|
||||
elif item.get("liveChatLegacyPaidMessageRenderer"):
|
||||
renderer = LiveChatLegacyPaidMessageRenderer(item)
|
||||
else:
|
||||
renderer = None
|
||||
return renderer
|
||||
return rendered_chatobj
|
||||
|
||||
@@ -1,75 +1,79 @@
|
||||
from datetime import datetime
|
||||
|
||||
|
||||
class Author:
|
||||
pass
|
||||
|
||||
|
||||
class BaseRenderer:
|
||||
def __init__(self, item, chattype):
|
||||
self.renderer = list(item.values())[0]
|
||||
self.chattype = chattype
|
||||
self.author = Author()
|
||||
def setitem(self, item, chat):
|
||||
self.item = item
|
||||
self.chat = chat
|
||||
self.chat.author = Author()
|
||||
|
||||
def settype(self):
|
||||
pass
|
||||
|
||||
def get_snippet(self):
|
||||
self.type = self.chattype
|
||||
self.id = self.renderer.get('id')
|
||||
timestampUsec = int(self.renderer.get("timestampUsec",0))
|
||||
self.timestamp = int(timestampUsec/1000)
|
||||
tst = self.renderer.get("timestampText")
|
||||
self.chat.id = self.item.get('id')
|
||||
timestampUsec = int(self.item.get("timestampUsec", 0))
|
||||
self.chat.timestamp = int(timestampUsec / 1000)
|
||||
tst = self.item.get("timestampText")
|
||||
if tst:
|
||||
self.elapsedTime = tst.get("simpleText")
|
||||
self.chat.elapsedTime = tst.get("simpleText")
|
||||
else:
|
||||
self.elapsedTime = ""
|
||||
self.datetime = self.get_datetime(timestampUsec)
|
||||
self.message ,self.messageEx = self.get_message(self.renderer)
|
||||
self.id = self.renderer.get('id')
|
||||
self.amountValue= 0.0
|
||||
self.amountString = ""
|
||||
self.currency= ""
|
||||
self.bgColor = 0
|
||||
self.chat.elapsedTime = ""
|
||||
self.chat.datetime = self.get_datetime(timestampUsec)
|
||||
self.chat.message, self.chat.messageEx = self.get_message(self.item)
|
||||
self.chat.id = self.item.get('id')
|
||||
self.chat.amountValue = 0.0
|
||||
self.chat.amountString = ""
|
||||
self.chat.currency = ""
|
||||
self.chat.bgColor = 0
|
||||
|
||||
def get_authordetails(self):
|
||||
self.author.badgeUrl = ""
|
||||
(self.author.isVerified,
|
||||
self.author.isChatOwner,
|
||||
self.author.isChatSponsor,
|
||||
self.author.isChatModerator) = (
|
||||
self.get_badges(self.renderer)
|
||||
self.chat.author.badgeUrl = ""
|
||||
(self.chat.author.isVerified,
|
||||
self.chat.author.isChatOwner,
|
||||
self.chat.author.isChatSponsor,
|
||||
self.chat.author.isChatModerator) = (
|
||||
self.get_badges(self.item)
|
||||
)
|
||||
self.author.channelId = self.renderer.get("authorExternalChannelId")
|
||||
self.author.channelUrl = "http://www.youtube.com/channel/"+self.author.channelId
|
||||
self.author.name = self.renderer["authorName"]["simpleText"]
|
||||
self.author.imageUrl= self.renderer["authorPhoto"]["thumbnails"][1]["url"]
|
||||
self.chat.author.channelId = self.item.get("authorExternalChannelId")
|
||||
self.chat.author.channelUrl = "http://www.youtube.com/channel/" + self.chat.author.channelId
|
||||
self.chat.author.name = self.item["authorName"]["simpleText"]
|
||||
self.chat.author.imageUrl = self.item["authorPhoto"]["thumbnails"][1]["url"]
|
||||
|
||||
|
||||
|
||||
def get_message(self,renderer):
|
||||
def get_message(self, item):
|
||||
message = ''
|
||||
message_ex = []
|
||||
if renderer.get("message"):
|
||||
runs=renderer["message"].get("runs")
|
||||
if runs:
|
||||
runs = item.get("message", {}).get("runs", {})
|
||||
for r in runs:
|
||||
if r:
|
||||
if not hasattr(r, "get"):
|
||||
continue
|
||||
if r.get('emoji'):
|
||||
message += r['emoji'].get('shortcuts',[''])[0]
|
||||
message_ex.append(r['emoji']['image']['thumbnails'][1].get('url'))
|
||||
message += r['emoji'].get('shortcuts', [''])[0]
|
||||
message_ex.append({
|
||||
'id': r['emoji'].get('emojiId').split('/')[-1],
|
||||
'txt': r['emoji'].get('shortcuts', [''])[0],
|
||||
'url': r['emoji']['image']['thumbnails'][0].get('url')
|
||||
})
|
||||
else:
|
||||
message += r.get('text','')
|
||||
message_ex.append(r.get('text',''))
|
||||
message += r.get('text', '')
|
||||
message_ex.append(r.get('text', ''))
|
||||
return message, message_ex
|
||||
|
||||
|
||||
|
||||
def get_badges(self,renderer):
|
||||
self.author.type = ''
|
||||
def get_badges(self, renderer):
|
||||
self.chat.author.type = ''
|
||||
isVerified = False
|
||||
isChatOwner = False
|
||||
isChatSponsor = False
|
||||
isChatModerator = False
|
||||
badges=renderer.get("authorBadges")
|
||||
if badges:
|
||||
badges = renderer.get("authorBadges", {})
|
||||
for badge in badges:
|
||||
if badge["liveChatAuthorBadgeRenderer"].get("icon"):
|
||||
author_type = badge["liveChatAuthorBadgeRenderer"]["icon"]["iconType"]
|
||||
self.author.type = author_type
|
||||
self.chat.author.type = author_type
|
||||
if author_type == 'VERIFIED':
|
||||
isVerified = True
|
||||
if author_type == 'OWNER':
|
||||
@@ -78,16 +82,20 @@ class BaseRenderer:
|
||||
isChatModerator = True
|
||||
if badge["liveChatAuthorBadgeRenderer"].get("customThumbnail"):
|
||||
isChatSponsor = True
|
||||
self.author.type = 'MEMBER'
|
||||
self.chat.author.type = 'MEMBER'
|
||||
self.get_badgeurl(badge)
|
||||
return isVerified, isChatOwner, isChatSponsor, isChatModerator
|
||||
|
||||
def get_badgeurl(self, badge):
|
||||
self.chat.author.badgeUrl = badge["liveChatAuthorBadgeRenderer"]["customThumbnail"]["thumbnails"][0]["url"]
|
||||
|
||||
def get_badgeurl(self,badge):
|
||||
self.author.badgeUrl = badge["liveChatAuthorBadgeRenderer"]["customThumbnail"]["thumbnails"][0]["url"]
|
||||
|
||||
|
||||
|
||||
def get_datetime(self,timestamp):
|
||||
dt = datetime.fromtimestamp(timestamp/1000000)
|
||||
def get_datetime(self, timestamp):
|
||||
dt = datetime.fromtimestamp(timestamp / 1000000)
|
||||
return dt.strftime('%Y-%m-%d %H:%M:%S')
|
||||
|
||||
def get_chatobj(self):
|
||||
return self.chat
|
||||
|
||||
def clear(self):
|
||||
self.item = None
|
||||
self.chat = None
|
||||
|
||||
@@ -1,18 +1,15 @@
|
||||
from .base import BaseRenderer
|
||||
|
||||
|
||||
class LiveChatLegacyPaidMessageRenderer(BaseRenderer):
|
||||
def __init__(self, item):
|
||||
super().__init__(item, "newSponsor")
|
||||
|
||||
|
||||
def settype(self):
|
||||
self.chat.type = "newSponsor"
|
||||
|
||||
def get_authordetails(self):
|
||||
super().get_authordetails()
|
||||
self.author.isChatSponsor = True
|
||||
|
||||
|
||||
def get_message(self,renderer):
|
||||
message = (renderer["eventText"]["runs"][0]["text"]
|
||||
)+' / '+(renderer["detailText"]["simpleText"])
|
||||
return message
|
||||
|
||||
self.chat.author.isChatSponsor = True
|
||||
|
||||
def get_message(self, item):
|
||||
message = (item["eventText"]["runs"][0]["text"]
|
||||
) + ' / ' + (item["detailText"]["simpleText"])
|
||||
return message, [message]
|
||||
|
||||
pytchat/processors/default/renderer/membership.py (Normal file, 18 lines)
@@ -0,0 +1,18 @@
|
||||
from .base import BaseRenderer
|
||||
|
||||
|
||||
class LiveChatMembershipItemRenderer(BaseRenderer):
|
||||
def settype(self):
|
||||
self.chat.type = "newSponsor"
|
||||
|
||||
def get_authordetails(self):
|
||||
super().get_authordetails()
|
||||
self.chat.author.isChatSponsor = True
|
||||
|
||||
def get_message(self, item):
|
||||
try:
|
||||
message = ''.join([mes.get("text", "")
|
||||
for mes in item["headerSubtext"]["runs"]])
|
||||
except KeyError:
|
||||
return "Welcome New Member!", ["Welcome New Member!"]
|
||||
return message, [message]
|
||||
@@ -3,30 +3,45 @@ from . import currency
|
||||
from .base import BaseRenderer
|
||||
superchat_regex = re.compile(r"^(\D*)(\d{1,3}(,\d{3})*(\.\d*)*\b)$")
|
||||
|
||||
class LiveChatPaidMessageRenderer(BaseRenderer):
|
||||
def __init__(self, item):
|
||||
super().__init__(item, "superChat")
|
||||
|
||||
class Colors:
|
||||
pass
|
||||
|
||||
|
||||
class LiveChatPaidMessageRenderer(BaseRenderer):
|
||||
def settype(self):
|
||||
self.chat.type = "superChat"
|
||||
|
||||
def get_snippet(self):
|
||||
super().get_snippet()
|
||||
amountDisplayString, symbol, amount =(
|
||||
self.get_amountdata(self.renderer)
|
||||
amountDisplayString, symbol, amount = (
|
||||
self.get_amountdata(self.item)
|
||||
)
|
||||
self.amountValue= amount
|
||||
self.amountString = amountDisplayString
|
||||
self.currency= currency.symbols[symbol]["fxtext"] if currency.symbols.get(symbol) else symbol
|
||||
self.bgColor= self.renderer.get("bodyBackgroundColor", 0)
|
||||
self.chat.amountValue = amount
|
||||
self.chat.amountString = amountDisplayString
|
||||
self.chat.currency = currency.symbols[symbol]["fxtext"] if currency.symbols.get(
|
||||
symbol) else symbol
|
||||
self.chat.bgColor = self.item.get("bodyBackgroundColor", 0)
|
||||
self.chat.colors = self.get_colors()
|
||||
|
||||
|
||||
|
||||
def get_amountdata(self,renderer):
|
||||
amountDisplayString = renderer["purchaseAmountText"]["simpleText"]
|
||||
def get_amountdata(self, item):
|
||||
amountDisplayString = item["purchaseAmountText"]["simpleText"]
|
||||
m = superchat_regex.search(amountDisplayString)
|
||||
if m:
|
||||
symbol = m.group(1)
|
||||
amount = float(m.group(2).replace(',',''))
|
||||
amount = float(m.group(2).replace(',', ''))
|
||||
else:
|
||||
symbol = ""
|
||||
amount = 0.0
|
||||
return amountDisplayString, symbol, amount
|
||||
|
||||
def get_colors(self):
|
||||
item = self.item
|
||||
colors = Colors()
|
||||
colors.headerBackgroundColor = item.get("headerBackgroundColor", 0)
|
||||
colors.headerTextColor = item.get("headerTextColor", 0)
|
||||
colors.bodyBackgroundColor = item.get("bodyBackgroundColor", 0)
|
||||
colors.bodyTextColor = item.get("bodyTextColor", 0)
|
||||
colors.timestampColor = item.get("timestampColor", 0)
|
||||
colors.authorNameTextColor = item.get("authorNameTextColor", 0)
|
||||
return colors
|
||||
|
||||
@@ -3,37 +3,45 @@ from . import currency
|
||||
from .base import BaseRenderer
|
||||
superchat_regex = re.compile(r"^(\D*)(\d{1,3}(,\d{3})*(\.\d*)*\b)$")
|
||||
|
||||
class LiveChatPaidStickerRenderer(BaseRenderer):
|
||||
def __init__(self, item):
|
||||
super().__init__(item, "superSticker")
|
||||
|
||||
class Colors2:
|
||||
pass
|
||||
|
||||
|
||||
class LiveChatPaidStickerRenderer(BaseRenderer):
|
||||
def settype(self):
|
||||
self.chat.type = "superSticker"
|
||||
|
||||
def get_snippet(self):
|
||||
super().get_snippet()
|
||||
amountDisplayString, symbol, amount =(
|
||||
self.get_amountdata(self.renderer)
|
||||
amountDisplayString, symbol, amount = (
|
||||
self.get_amountdata(self.item)
|
||||
)
|
||||
self.amountValue = amount
|
||||
self.amountString = amountDisplayString
|
||||
self.currency = currency.symbols[symbol]["fxtext"] if currency.symbols.get(symbol) else symbol
|
||||
self.bgColor = self.renderer.get("moneyChipBackgroundColor", 0)
|
||||
self.sticker = "https:"+self.renderer["sticker"]["thumbnails"][0]["url"]
|
||||
self.chat.amountValue = amount
|
||||
self.chat.amountString = amountDisplayString
|
||||
self.chat.currency = currency.symbols[symbol]["fxtext"] if currency.symbols.get(
|
||||
symbol) else symbol
|
||||
self.chat.bgColor = self.item.get("backgroundColor", 0)
|
||||
self.chat.sticker = "".join(("https:",
|
||||
self.item["sticker"]["thumbnails"][0]["url"]))
|
||||
self.chat.colors = self.get_colors()
|
||||
|
||||
|
||||
|
||||
def get_amountdata(self,renderer):
|
||||
amountDisplayString = renderer["purchaseAmountText"]["simpleText"]
|
||||
def get_amountdata(self, item):
|
||||
amountDisplayString = item["purchaseAmountText"]["simpleText"]
|
||||
m = superchat_regex.search(amountDisplayString)
|
||||
if m:
|
||||
symbol = m.group(1)
|
||||
amount = float(m.group(2).replace(',',''))
|
||||
amount = float(m.group(2).replace(',', ''))
|
||||
else:
|
||||
symbol = ""
|
||||
amount = 0.0
|
||||
return amountDisplayString, symbol, amount
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
def get_colors(self):
|
||||
item = self.item
|
||||
colors = Colors2()
|
||||
colors.moneyChipBackgroundColor = item.get("moneyChipBackgroundColor", 0)
|
||||
colors.moneyChipTextColor = item.get("moneyChipTextColor", 0)
|
||||
colors.backgroundColor = item.get("backgroundColor", 0)
|
||||
colors.authorNameTextColor = item.get("authorNameTextColor", 0)
|
||||
return colors
|
||||
|
||||
@@ -1,4 +1,6 @@
|
||||
from .base import BaseRenderer
|
||||
|
||||
|
||||
class LiveChatTextMessageRenderer(BaseRenderer):
|
||||
def __init__(self, item):
|
||||
super().__init__(item, "textMessage")
|
||||
def settype(self):
|
||||
self.chat.type = "textMessage"
|
||||
|
||||
@@ -1,8 +1,10 @@
|
||||
from .chat_processor import ChatProcessor
|
||||
|
||||
|
||||
class DummyProcessor(ChatProcessor):
|
||||
'''
|
||||
Dummy processor just returns received chat_components directly.
|
||||
'''
|
||||
|
||||
def process(self, chat_components: list):
|
||||
return chat_components
|
||||
|
||||
@@ -1,41 +1,78 @@
|
||||
import csv
|
||||
import httpx
|
||||
import os
|
||||
import re
|
||||
import time
|
||||
from base64 import standard_b64encode
|
||||
from concurrent.futures import ThreadPoolExecutor
|
||||
from .chat_processor import ChatProcessor
|
||||
from .default.processor import DefaultProcessor
|
||||
from ..exceptions import UnknownConnectionError
|
||||
import tempfile
|
||||
|
||||
PATTERN = re.compile(r"(.*)\(([0-9]+)\)$")
|
||||
fmt_headers = ['datetime','elapsed','authorName','message','superchat'
|
||||
,'type','authorChannel']
|
||||
|
||||
fmt_headers = ['datetime', 'elapsed', 'authorName',
|
||||
'message', 'superchat', 'type', 'authorChannel']
|
||||
|
||||
HEADER_HTML = '''
|
||||
<html>
|
||||
<head>
|
||||
<meta http-equiv="Content-Type" content="text/html;charset=UTF-8">
|
||||
'''
|
||||
|
||||
TABLE_CSS = '''
|
||||
table.css {
|
||||
border-collapse: collapse;
|
||||
}
|
||||
|
||||
table.css thead{
|
||||
border-collapse: collapse;
|
||||
border: 1px solid #000
|
||||
}
|
||||
|
||||
table.css tr td{
|
||||
padding: 0.3em;
|
||||
border: 1px solid #000
|
||||
}
|
||||
|
||||
table.css th{
|
||||
padding: 0.3em;
|
||||
border: 1px solid #000
|
||||
}
|
||||
'''
|
||||
|
||||
|
||||
class HTMLArchiver(ChatProcessor):
|
||||
'''
|
||||
HtmlArchiver saves chat data as HTML table format.
|
||||
HTMLArchiver saves chat data as HTML table format.
|
||||
'''
|
||||
|
||||
def __init__(self, save_path):
|
||||
def __init__(self, save_path, callback=None):
|
||||
super().__init__()
|
||||
self.client = httpx.Client(http2=True)
|
||||
self.save_path = self._checkpath(save_path)
|
||||
with open(self.save_path, mode='a', encoding = 'utf-8') as f:
|
||||
f.write('<table border="1" style="border-collapse: collapse">')
|
||||
f.writelines(self._parse_html_header(fmt_headers))
|
||||
self.processor = DefaultProcessor()
|
||||
self.emoji_table = {} # dict for custom emojis. key: emoji_id, value: base64 encoded image binary.
|
||||
self.callback = callback
|
||||
self.executor = ThreadPoolExecutor(max_workers=10)
|
||||
self.tmp_fp = tempfile.NamedTemporaryFile(mode="a", encoding="utf-8", delete=False)
|
||||
self.tmp_filename = self.tmp_fp.name
|
||||
self.counter = 0
|
||||
|
||||
def _checkpath(self, filepath):
|
||||
splitter = os.path.splitext(os.path.basename(filepath))
|
||||
body = splitter[0]
|
||||
extention = splitter[1]
|
||||
newpath = filepath
|
||||
counter = 0
|
||||
counter = 1
|
||||
while os.path.exists(newpath):
|
||||
match = re.search(PATTERN,body)
|
||||
match = re.search(PATTERN, body)
|
||||
if match:
|
||||
counter=int(match[2])+1
|
||||
counter = int(match[2]) + 1
|
||||
num_with_bracket = f'({str(counter)})'
|
||||
body = f'{match[1]}{num_with_bracket}'
|
||||
else:
|
||||
body = f'{body}({str(counter)})'
|
||||
newpath = os.path.join(os.path.dirname(filepath),body+extention)
|
||||
newpath = os.path.join(os.path.dirname(filepath), body + extention)
|
||||
return newpath
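In other words (illustrative filenames): with the counter now starting at 1, an existing chat.html makes the archive fall back to chat(1).html, and if that also exists, to chat(2).html, and so on.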
|
||||
|
||||
def process(self, chat_components: list):
|
||||
@@ -46,47 +83,88 @@ class HTMLArchiver(ChatProcessor):
|
||||
save_path : str :
|
||||
Actual save path of file.
|
||||
total_lines : int :
|
||||
count of total lines written to the file.
|
||||
Count of total lines written to the file.
|
||||
"""
|
||||
if chat_components is None or len (chat_components) == 0:
|
||||
return
|
||||
|
||||
with open(self.save_path, mode='a', encoding = 'utf-8') as f:
|
||||
chats = self.processor.process(chat_components).items
|
||||
for c in chats:
|
||||
f.writelines(
|
||||
self._parse_html_line([
|
||||
if chat_components is None or len(chat_components) == 0:
|
||||
return self.save_path, self.counter
|
||||
for c in self.processor.process(chat_components).items:
|
||||
self.tmp_fp.write(
|
||||
self._parse_html_line((
|
||||
c.datetime,
|
||||
c.elapsedTime,
|
||||
c.author.name,
|
||||
c.message,
|
||||
self._parse_message(c.messageEx),
|
||||
c.amountString,
|
||||
c.author.type,
|
||||
c.author.channelId]
|
||||
c.author.channelId)
|
||||
)
|
||||
)
|
||||
'''
|
||||
#Palliative treatment#
|
||||
Comment out below line to prevent the table
|
||||
display from collapsing.
|
||||
'''
|
||||
#f.write('</table>')
|
||||
if self.callback:
|
||||
self.callback(None, 1)
|
||||
self.counter += 1
|
||||
return self.save_path, self.counter
|
||||
|
||||
def _parse_html_line(self, raw_line):
|
||||
html = ''
|
||||
html+=' <tr>'
|
||||
for cell in raw_line:
|
||||
html+='<td>'+cell+'</td>'
|
||||
html+='</tr>\n'
|
||||
return html
|
||||
return ''.join(('<tr>',
|
||||
''.join(''.join(('<td>', cell, '</td>')) for cell in raw_line),
|
||||
'</tr>\n'))
|
||||
|
||||
def _parse_html_header(self,raw_line):
|
||||
html = ''
|
||||
html+='<thead>\n'
|
||||
html+=' <tr>'
|
||||
for cell in raw_line:
|
||||
html+='<th>'+cell+'</th>'
|
||||
html+='</tr>\n'
|
||||
html+='</thead>\n'
|
||||
return html
|
||||
def _parse_table_header(self, raw_line):
|
||||
return ''.join(('<thead><tr>',
|
||||
''.join(''.join(('<th>', cell, '</th>')) for cell in raw_line),
|
||||
'</tr></thead>\n'))
|
||||
|
||||
def _parse_message(self, message_items: list) -> str:
|
||||
return ''.join(''.join(('<span class="', self._set_emoji_table(item), '" title="', item['txt'], '"></span>'))
|
||||
if type(item) is dict else item
|
||||
for item in message_items)
|
||||
|
||||
def _encode_img(self, url):
|
||||
err = None
|
||||
for _ in range(5):
|
||||
try:
|
||||
resp = self.client.get(url, timeout=30)
|
||||
break
|
||||
except httpx.HTTPError as e:
|
||||
err = e
|
||||
time.sleep(3)
|
||||
else:
|
||||
raise UnknownConnectionError(str(err))
|
||||
|
||||
return standard_b64encode(resp.content).decode()
|
||||
|
||||
def _set_emoji_table(self, item: dict):
|
||||
emoji_id = ''.join(('Z', item['id'])) if 48 <= ord(item['id'][0]) <= 57 else item['id']
|
||||
if emoji_id not in self.emoji_table:
|
||||
self.emoji_table.setdefault(emoji_id, self.executor.submit(self._encode_img, item['url']))
|
||||
return emoji_id
|
||||
|
||||
def _stylecode(self, name, code, width, height):
|
||||
return ''.join((".", name, " { display: inline-block; background-image: url(data:image/png;base64,",
|
||||
code, "); background-repeat: no-repeat; width: ",
|
||||
str(width), "; height: ", str(height), ";}"))
|
||||
|
||||
def _create_styles(self):
|
||||
return '\n'.join(('<style type="text/css">',
|
||||
TABLE_CSS,
|
||||
'\n'.join(self._stylecode(key, self.emoji_table[key].result(), 24, 24)
|
||||
for key in self.emoji_table.keys()),
|
||||
'</style>\n'))
|
||||
|
||||
def finalize(self):
|
||||
if self.tmp_fp:
|
||||
self.tmp_fp.flush()
|
||||
self.tmp_fp = None
|
||||
with open(self.save_path, mode='w', encoding='utf-8') as outfile:
|
||||
# write header
|
||||
outfile.writelines((
|
||||
HEADER_HTML, self._create_styles(), '</head>\n',
|
||||
'<body>\n', '<table class="css">\n',
|
||||
self._parse_table_header(fmt_headers)))
|
||||
# write body
|
||||
fp = open(self.tmp_filename, mode="r", encoding="utf-8")
|
||||
for line in fp:
|
||||
outfile.write(line)
|
||||
outfile.write('</table>\n</body>\n</html>')
|
||||
fp.close()
|
||||
os.remove(self.tmp_filename)
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
import datetime
|
||||
import json
|
||||
import os
|
||||
import re
|
||||
@@ -6,6 +5,7 @@ from .chat_processor import ChatProcessor
|
||||
|
||||
PATTERN = re.compile(r"(.*)\(([0-9]+)\)$")
|
||||
|
||||
|
||||
class JsonfileArchiver(ChatProcessor):
|
||||
"""
|
||||
JsonfileArchiver saves chat data as text of JSON lines.
|
||||
@@ -17,12 +17,13 @@ class JsonfileArchiver(ChatProcessor):
|
||||
it is automatically saved under a different name
|
||||
with suffix '(number)'
|
||||
"""
|
||||
def __init__(self,save_path):
|
||||
|
||||
def __init__(self, save_path):
|
||||
super().__init__()
|
||||
self.save_path = self._checkpath(save_path)
|
||||
self.line_counter = 0
|
||||
|
||||
def process(self,chat_components: list):
|
||||
def process(self, chat_components: list):
|
||||
"""
|
||||
Returns
|
||||
----------
|
||||
@@ -32,19 +33,23 @@ class JsonfileArchiver(ChatProcessor):
|
||||
total_lines : int :
|
||||
count of total lines written to the file.
|
||||
"""
|
||||
if chat_components is None: return
|
||||
with open(self.save_path, mode='a', encoding = 'utf-8') as f:
|
||||
if chat_components is None:
|
||||
return
|
||||
with open(self.save_path, mode='a', encoding='utf-8') as f:
|
||||
for component in chat_components:
|
||||
if component is None: continue
|
||||
if component is None:
|
||||
continue
|
||||
chatdata = component.get('chatdata')
|
||||
if chatdata is None: continue
|
||||
if chatdata is None:
|
||||
continue
|
||||
for action in chatdata:
|
||||
if action is None: continue
|
||||
json_line = json.dumps(action, ensure_ascii = False)
|
||||
f.writelines(json_line+'\n')
|
||||
self.line_counter+=1
|
||||
return { "save_path" : self.save_path,
|
||||
"total_lines": self.line_counter }
|
||||
if action is None:
|
||||
continue
|
||||
json_line = json.dumps(action, ensure_ascii=False)
|
||||
f.writelines(json_line + '\n')
|
||||
self.line_counter += 1
|
||||
return {"save_path": self.save_path,
|
||||
"total_lines": self.line_counter}
|
||||
|
||||
def _checkpath(self, filepath):
|
||||
splitter = os.path.splitext(os.path.basename(filepath))
|
||||
@@ -53,14 +58,12 @@ class JsonfileArchiver(ChatProcessor):
|
||||
newpath = filepath
|
||||
counter = 0
|
||||
while os.path.exists(newpath):
|
||||
match = re.search(PATTERN,body)
|
||||
match = re.search(PATTERN, body)
|
||||
if match:
|
||||
counter=int(match[2])+1
|
||||
counter = int(match[2]) + 1
|
||||
num_with_bracket = f'({str(counter)})'
|
||||
body = f'{match[1]}{num_with_bracket}'
|
||||
else:
|
||||
body = f'{body}({str(counter)})'
|
||||
newpath = os.path.join(os.path.dirname(filepath),body+extention)
|
||||
newpath = os.path.join(os.path.dirname(filepath), body + extention)
|
||||
return newpath
|
||||
|
||||
|
||||
|
||||
@@ -1,10 +1,6 @@
|
||||
import json
|
||||
import os
|
||||
import traceback
|
||||
import datetime
|
||||
import time
|
||||
from .chat_processor import ChatProcessor
|
||||
##version 2
|
||||
|
||||
|
||||
class SimpleDisplayProcessor(ChatProcessor):
|
||||
|
||||
def process(self, chat_components: list):
|
||||
@@ -12,36 +8,42 @@ class SimpleDisplayProcessor(ChatProcessor):
|
||||
timeout = 0
|
||||
|
||||
if chat_components is None:
|
||||
return {"timeout":timeout, "chatlist":chatlist}
|
||||
return {"timeout": timeout, "chatlist": chatlist}
|
||||
for component in chat_components:
|
||||
timeout += component.get('timeout', 0)
|
||||
chatdata = component.get('chatdata')
|
||||
|
||||
if chatdata is None:break
|
||||
if chatdata is None:
|
||||
break
|
||||
for action in chatdata:
|
||||
if action is None:continue
|
||||
if action.get('addChatItemAction') is None:continue
|
||||
if action['addChatItemAction'].get('item') is None:continue
|
||||
if action is None:
|
||||
continue
|
||||
if action.get('addChatItemAction') is None:
|
||||
continue
|
||||
if action['addChatItemAction'].get('item') is None:
|
||||
continue
|
||||
|
||||
root = action['addChatItemAction']['item'].get('liveChatTextMessageRenderer')
|
||||
root = action['addChatItemAction']['item'].get(
|
||||
'liveChatTextMessageRenderer')
|
||||
|
||||
if root:
|
||||
author_name = root['authorName']['simpleText']
|
||||
message = self._parse_message(root.get('message'))
|
||||
purchase_amount_text = ''
|
||||
else:
|
||||
root = ( action['addChatItemAction']['item'].get('liveChatPaidMessageRenderer') or
|
||||
action['addChatItemAction']['item'].get('liveChatPaidStickerRenderer') )
|
||||
root = (action['addChatItemAction']['item'].get('liveChatPaidMessageRenderer')
|
||||
or action['addChatItemAction']['item'].get('liveChatPaidStickerRenderer'))
|
||||
if root:
|
||||
author_name = root['authorName']['simpleText']
|
||||
message = self._parse_message(root.get('message'))
|
||||
purchase_amount_text = root['purchaseAmountText']['simpleText']
|
||||
else:
|
||||
continue
|
||||
chatlist.append(f'[{author_name}]: {message} {purchase_amount_text}')
|
||||
return {"timeout":timeout, "chatlist":chatlist}
|
||||
chatlist.append(
|
||||
f'[{author_name}]: {message} {purchase_amount_text}')
|
||||
return {"timeout": timeout, "chatlist": chatlist}
|
||||
|
||||
def _parse_message(self,message):
|
||||
def _parse_message(self, message):
|
||||
if message is None:
|
||||
return ''
|
||||
if message.get('simpleText'):
|
||||
@@ -51,11 +53,9 @@ class SimpleDisplayProcessor(ChatProcessor):
|
||||
tmp = ''
|
||||
for run in runs:
|
||||
if run.get('emoji'):
|
||||
tmp+=(run['emoji']['shortcuts'][0])
|
||||
tmp += (run['emoji']['shortcuts'][0])
|
||||
elif run.get('text'):
|
||||
tmp+=(run['text'])
|
||||
tmp += (run['text'])
|
||||
return tmp
|
||||
else:
|
||||
return ''
|
||||
|
||||
|
||||
|
||||
@@ -5,6 +5,8 @@ Calculate speed of chat.
|
||||
"""
|
||||
import time
|
||||
from .. chat_processor import ChatProcessor
|
||||
|
||||
|
||||
class RingQueue:
|
||||
"""
|
||||
Ring buffer (circular queue).
|
||||
@@ -50,17 +52,17 @@ class RingQueue:
|
||||
"""
|
||||
if self.mergin:
|
||||
self.items.append(item)
|
||||
self.last_pos = len(self.items)-1
|
||||
if self.last_pos == self.capacity-1:
|
||||
self.last_pos = len(self.items) - 1
|
||||
if self.last_pos == self.capacity - 1:
|
||||
self.mergin = False
|
||||
return
|
||||
self.last_pos += 1
|
||||
if self.last_pos > self.capacity-1:
|
||||
if self.last_pos > self.capacity - 1:
|
||||
self.last_pos = 0
|
||||
self.items[self.last_pos] = item
|
||||
|
||||
self.first_pos += 1
|
||||
if self.first_pos > self.capacity-1:
|
||||
if self.first_pos > self.capacity - 1:
|
||||
self.first_pos = 0
|
||||
|
||||
def get(self):
|
||||
@@ -77,6 +79,7 @@ class RingQueue:
|
||||
def item_count(self):
|
||||
return len(self.items)
|
||||
|
||||
|
||||
class SpeedCalculator(ChatProcessor, RingQueue):
|
||||
"""
|
||||
Calculates the momentum (speed) of the chat.
|
||||
@@ -91,7 +94,7 @@ class SpeedCalculator(ChatProcessor, RingQueue):
|
||||
Maximum number of chat-speed data entries stored in the RingQueue.
|
||||
"""
|
||||
|
||||
def __init__(self, capacity = 10):
|
||||
def __init__(self, capacity=10):
|
||||
super().__init__(capacity)
|
||||
self.speed = 0
|
||||
|
||||
@@ -106,7 +109,6 @@ class SpeedCalculator(ChatProcessor, RingQueue):
|
||||
self.speed = self._calc_speed()
|
||||
return self.speed
|
||||
|
||||
|
||||
def _calc_speed(self):
|
||||
"""
|
||||
Based on the list of chat-speed data held in the RingQueue,
|
||||
@@ -117,13 +119,12 @@ class SpeedCalculator(ChatProcessor, RingQueue):
|
||||
Chat speed (number of chats converted to a one-minute rate)
|
||||
"""
|
||||
try:
|
||||
#Total number of chats in the queue
# Total number of chats in the queue
|
||||
total = sum(item['chat_count'] for item in self.items)
|
||||
#Time difference between the first and last chats in the queue
|
||||
duration = (self.items[self.last_pos]['endtime']
|
||||
- self.items[self.first_pos]['starttime'])
|
||||
# Time difference between the first and last chats in the queue
|
||||
duration = (self.items[self.last_pos]['endtime'] - self.items[self.first_pos]['starttime'])
|
||||
if duration != 0:
|
||||
return int(total*60/duration)
|
||||
return int(total * 60 / duration)
|
||||
return 0
|
||||
except IndexError:
|
||||
return 0
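As a quick worked example of the formula above: 120 chats whose first and last entries lie 40 seconds apart give int(120 * 60 / 40) = 180 chats per minute.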
|
||||
@@ -145,59 +146,58 @@ class SpeedCalculator(ChatProcessor, RingQueue):
|
||||
'''
|
||||
timestamp_now = int(time.time())
|
||||
self.put({
|
||||
'chat_count':0,
|
||||
'starttime':int(timestamp_now),
|
||||
'endtime':int(timestamp_now)
|
||||
'chat_count': 0,
|
||||
'starttime': int(timestamp_now),
|
||||
'endtime': int(timestamp_now)
|
||||
})
|
||||
|
||||
def _get_timestamp(action :dict):
|
||||
def _get_timestamp(action: dict):
|
||||
"""
|
||||
Extracts the time data from a chat item.
|
||||
"""
|
||||
try:
|
||||
item = action['addChatItemAction']['item']
|
||||
timestamp = int(item[list(item.keys())[0]]['timestampUsec'])
|
||||
except (KeyError,TypeError):
|
||||
except (KeyError, TypeError):
|
||||
return None
|
||||
return timestamp
|
||||
|
||||
if actions is None or len(actions)==0:
|
||||
if actions is None or len(actions) == 0:
|
||||
_put_emptydata()
|
||||
return
|
||||
|
||||
#Number of chat items in actions that carry time data
counter=0
#Time of the first chat item in actions
starttime= None
#Time of the last chat item in actions
endtime=None
|
||||
# Number of chat items in actions that carry time data
counter = 0
# Time of the first chat item in actions
starttime = None
# Time of the last chat item in actions
endtime = None
|
||||
|
||||
for action in actions:
|
||||
#Read timestampUsec from the chat data
# Read timestampUsec from the chat data
|
||||
gettime = _get_timestamp(action)
|
||||
|
||||
#If the item has no time data, try reading it from the next line of data
# If the item has no time data, try reading it from the next line of data
|
||||
if gettime is None:
|
||||
continue
|
||||
|
||||
#Set starttime to the timestamp of the first item that has a valid time
# Set starttime to the timestamp of the first item that has a valid time
|
||||
if starttime is None:
|
||||
starttime = gettime
|
||||
|
||||
#Set the last timestamp (keep overwriting, since some items in between may have no time data)
# Set the last timestamp (keep overwriting, since some items in between may have no time data)
|
||||
endtime = gettime
|
||||
|
||||
#チャットの数をインクリメント
|
||||
# チャットの数をインクリメント
|
||||
counter += 1
|
||||
|
||||
#チャット速度用のデータをRingQueueに送る
|
||||
# チャット速度用のデータをRingQueueに送る
|
||||
if starttime is None or endtime is None:
|
||||
_put_emptydata()
|
||||
return
|
||||
|
||||
self.put({
|
||||
'chat_count':counter,
|
||||
'starttime':int(starttime/1000000),
|
||||
'endtime':int(endtime/1000000)
|
||||
'chat_count': counter,
|
||||
'starttime': int(starttime / 1000000),
|
||||
'endtime': int(endtime / 1000000)
|
||||
})
|
||||
|
||||
|
||||
@@ -15,10 +15,12 @@ items_sticker = [
|
||||
'liveChatPaidStickerRenderer'
|
||||
]
|
||||
|
||||
|
||||
class SuperchatCalculator(ChatProcessor):
|
||||
"""
|
||||
Calculate the amount of SuperChat by currency.
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
self.results = {}
|
||||
|
||||
@@ -34,14 +36,16 @@ class SuperchatCalculator(ChatProcessor):
return self.results
for component in chat_components:
chatdata = component.get('chatdata')
if chatdata is None: continue
if chatdata is None:
continue
for action in chatdata:
renderer = self._get_item(action, items_paid) or \
self._get_item(action, items_sticker)
if renderer is None: continue
if renderer is None:
continue
symbol, amount = self._parse(renderer)
self.results.setdefault(symbol,0)
self.results[symbol]+=amount
self.results.setdefault(symbol, 0)
self.results[symbol] += amount
return self.results
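The accumulation step keys the running totals by currency symbol. A tiny self-contained sketch of that pattern, with hypothetical (symbol, amount) pairs chosen to match the figures used in the project's tests:

```python
# Sketch of the per-currency accumulation done in process().
# The parsed (symbol, amount) pairs below are hypothetical examples.
parsed = [('¥', 2000.0), ('¥', 4800.0), ('€', 2.0)]

results = {}
for symbol, amount in parsed:
    results.setdefault(symbol, 0)   # open a bucket the first time a currency appears
    results[symbol] += amount       # add this superchat to its bucket
print(results)                      # {'¥': 6800.0, '€': 2.0}
```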
|
||||
|
||||
def _parse(self, renderer):
|
||||
@@ -49,7 +53,7 @@ class SuperchatCalculator(ChatProcessor):
|
||||
m = superchat_regex.search(purchase_amount_text)
|
||||
if m:
|
||||
symbol = m.group(1)
|
||||
amount = float(m.group(2).replace(',',''))
|
||||
amount = float(m.group(2).replace(',', ''))
|
||||
else:
|
||||
symbol = ""
|
||||
amount = 0.0
|
||||
@@ -69,6 +73,3 @@ class SuperchatCalculator(ChatProcessor):
|
||||
continue
|
||||
return None
|
||||
return dict_body
|
||||
|
||||
|
||||
|
||||
|
||||
@@ -5,8 +5,9 @@ from .chat_processor import ChatProcessor
|
||||
from .default.processor import DefaultProcessor
|
||||
|
||||
PATTERN = re.compile(r"(.*)\(([0-9]+)\)$")
|
||||
fmt_headers = ['datetime','elapsed','authorName','message','superchatAmount'
|
||||
,'authorType','authorChannel']
|
||||
fmt_headers = ['datetime', 'elapsed', 'authorName', 'message',
|
||||
'superchatAmount', 'authorType', 'authorChannel']
|
||||
|
||||
|
||||
class TSVArchiver(ChatProcessor):
|
||||
'''
|
||||
@@ -16,7 +17,7 @@ class TSVArchiver(ChatProcessor):
|
||||
def __init__(self, save_path):
|
||||
super().__init__()
|
||||
self.save_path = self._checkpath(save_path)
|
||||
with open(self.save_path, mode='a', encoding = 'utf-8') as f:
|
||||
with open(self.save_path, mode='a', encoding='utf-8') as f:
|
||||
writer = csv.writer(f, delimiter='\t')
|
||||
writer.writerow(fmt_headers)
|
||||
self.processor = DefaultProcessor()
|
||||
@@ -28,14 +29,14 @@ class TSVArchiver(ChatProcessor):
|
||||
newpath = filepath
|
||||
counter = 0
|
||||
while os.path.exists(newpath):
|
||||
match = re.search(PATTERN,body)
|
||||
match = re.search(PATTERN, body)
|
||||
if match:
|
||||
counter=int(match[2])+1
|
||||
counter = int(match[2]) + 1
|
||||
num_with_bracket = f'({str(counter)})'
|
||||
body = f'{match[1]}{num_with_bracket}'
|
||||
else:
|
||||
body = f'{body}({str(counter)})'
|
||||
newpath = os.path.join(os.path.dirname(filepath),body+extention)
|
||||
newpath = os.path.join(os.path.dirname(filepath), body + extention)
|
||||
return newpath
|
||||
|
||||
def process(self, chat_components: list):
|
||||
@@ -48,10 +49,10 @@ class TSVArchiver(ChatProcessor):
|
||||
total_lines : int :
|
||||
count of total lines written to the file.
|
||||
"""
|
||||
if chat_components is None or len (chat_components) == 0:
|
||||
if chat_components is None or len(chat_components) == 0:
|
||||
return
|
||||
|
||||
with open(self.save_path, mode='a', encoding = 'utf-8') as f:
|
||||
with open(self.save_path, mode='a', encoding='utf-8') as f:
|
||||
writer = csv.writer(f, delimiter='\t')
|
||||
chats = self.processor.process(chat_components).items
|
||||
for c in chats:
|
||||
@@ -64,7 +65,3 @@ class TSVArchiver(ChatProcessor):
|
||||
c.author.type,
|
||||
c.author.channelId
|
||||
])
|
||||
|
||||
|
||||
|
||||
|
||||
@@ -1,131 +0,0 @@
|
||||
import aiohttp
|
||||
import asyncio
|
||||
import json
|
||||
from . import parser
|
||||
from . block import Block
|
||||
from . worker import ExtractWorker
|
||||
from . patch import Patch
|
||||
from ... import config
|
||||
from ... paramgen import arcparam
|
||||
from concurrent.futures import CancelledError
|
||||
from urllib.parse import quote
|
||||
|
||||
headers = config.headers
|
||||
REPLAY_URL = "https://www.youtube.com/live_chat_replay/" \
|
||||
"get_live_chat_replay?continuation="
|
||||
|
||||
def _split(start, end, count, min_interval_sec = 120):
"""
Split section from `start` to `end` into `count` pieces,
and returns the beginning of each piece.
The `count` is adjusted so that the length of each piece
is no smaller than `min_interval`.

Returns:
--------
List of the offset of each block's first chat data.
"""

if not (isinstance(start,int) or isinstance(start,float)) or \
not (isinstance(end,int) or isinstance(end,float)):
raise ValueError("start/end must be int or float")
if not isinstance(count,int):
raise ValueError("count must be int")
if start>end:
raise ValueError("end must be equal to or greater than start.")
if count<1:
raise ValueError("count must be equal to or greater than 1.")
if (end-start)/count < min_interval_sec:
count = int((end-start)/min_interval_sec)
if count == 0 : count = 1
interval= (end-start)/count

if count == 1:
return [start]
return sorted( list(set( [int(start + interval*j)
for j in range(count) ])))
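A quick standalone check of the splitting behaviour described in the docstring; the function body is re-typed below (mirroring `_split` above) so the snippet runs on its own:

```python
# Re-typed mirror of _split so the behaviour can be checked in isolation.
def split(start, end, count, min_interval_sec=120):
    if (end - start) / count < min_interval_sec:
        count = max(int((end - start) / min_interval_sec), 1)
    interval = (end - start) / count
    if count == 1:
        return [start]
    return sorted({int(start + interval * j) for j in range(count)})

print(split(0, 3600, 10))  # ten evenly spaced offsets across an hour of replay
print(split(0, 300, 10))   # [0, 150]: count is cut to 2 by the 120-second floor
```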
|
||||
|
||||
def ready_blocks(video_id, duration, div, callback):
|
||||
if div <= 0: raise ValueError
|
||||
|
||||
async def _get_blocks( video_id, duration, div, callback):
|
||||
async with aiohttp.ClientSession() as session:
|
||||
tasks = [_create_block(session, video_id, seektime, callback)
|
||||
for seektime in _split(-1, duration, div)]
|
||||
return await asyncio.gather(*tasks)
|
||||
|
||||
async def _create_block(session, video_id, seektime, callback):
|
||||
continuation = arcparam.getparam(video_id, seektime = seektime)
|
||||
url = f"{REPLAY_URL}{quote(continuation)}&pbj=1"
|
||||
async with session.get(url, headers = headers) as resp:
|
||||
text = await resp.text()
|
||||
next_continuation, actions = parser.parse(json.loads(text))
|
||||
if actions:
|
||||
first = parser.get_offset(actions[0])
|
||||
last = parser.get_offset(actions[-1])
|
||||
if callback:
|
||||
callback(actions,last-first)
|
||||
return Block(
|
||||
continuation = next_continuation,
|
||||
chat_data = actions,
|
||||
first = first,
|
||||
last = last
|
||||
)
|
||||
"""
|
||||
fetch initial blocks.
|
||||
"""
|
||||
loop = asyncio.get_event_loop()
|
||||
blocks = loop.run_until_complete(
|
||||
_get_blocks(video_id, duration, div, callback))
|
||||
return blocks
|
||||
|
||||
def fetch_patch(callback, blocks, video_id):
|
||||
|
||||
async def _allocate_workers():
|
||||
workers = [
|
||||
ExtractWorker(
|
||||
fetch = _fetch, block = block,
|
||||
blocks = blocks, video_id = video_id
|
||||
)
|
||||
for block in blocks
|
||||
]
|
||||
async with aiohttp.ClientSession() as session:
|
||||
tasks = [worker.run(session) for worker in workers]
|
||||
return await asyncio.gather(*tasks)
|
||||
|
||||
async def _fetch(continuation,session) -> Patch:
|
||||
url = f"{REPLAY_URL}{quote(continuation)}&pbj=1"
|
||||
async with session.get(url,headers = config.headers) as resp:
|
||||
chat_json = await resp.text()
|
||||
continuation, actions = parser.parse(json.loads(chat_json))
|
||||
if actions:
|
||||
last = parser.get_offset(actions[-1])
|
||||
first = parser.get_offset(actions[0])
|
||||
if callback:
|
||||
callback(actions, last - first)
|
||||
return Patch(actions, continuation, first, last)
|
||||
return Patch(continuation = continuation)
|
||||
"""
|
||||
allocate workers and assign blocks.
|
||||
"""
|
||||
loop = asyncio.get_event_loop()
|
||||
try:
|
||||
loop.run_until_complete(_allocate_workers())
|
||||
except CancelledError:
|
||||
pass
|
||||
|
||||
async def _shutdown():
|
||||
print("\nshutdown...")
|
||||
tasks = [t for t in asyncio.all_tasks()
|
||||
if t is not asyncio.current_task()]
|
||||
for task in tasks:
|
||||
task.cancel()
|
||||
try:
|
||||
await task
|
||||
except asyncio.CancelledError:
|
||||
pass
|
||||
|
||||
def cancel():
|
||||
loop = asyncio.get_event_loop()
|
||||
loop.create_task(_shutdown())
|
||||
|
||||
@@ -1,57 +0,0 @@
|
||||
from . import parser
|
||||
class Block:
|
||||
"""Block object represents something like a box
|
||||
to join chunk of chatdata.
|
||||
|
||||
Parameter:
|
||||
---------
|
||||
first : int :
|
||||
videoOffsetTimeMs of the first chat_data
|
||||
(chat_data[0])
|
||||
|
||||
last : int :
|
||||
videoOffsetTimeMs of the last chat_data.
|
||||
(chat_data[-1])
|
||||
|
||||
this value increases as fetching chatdata progresses.
|
||||
|
||||
end : int :
|
||||
target videoOffsetTimeMs of last chat data for extract,
|
||||
equals to first videoOffsetTimeMs of next block.
|
||||
when extract worker reaches this offset, stop fetching.
|
||||
|
||||
continuation : str :
|
||||
continuation param of last chat data.
|
||||
|
||||
chat_data : list
|
||||
|
||||
done : bool :
|
||||
whether this block has been fetched.
|
||||
|
||||
remaining : int :
|
||||
remaining data to extract.
|
||||
equals end - last.
|
||||
|
||||
is_last : bool :
|
||||
whether this block is the last one in blocklist.
|
||||
|
||||
during_split : bool :
|
||||
whether this block is in the process of during_split.
|
||||
while True, this block is excluded from duplicate split procedure.
|
||||
"""
|
||||
|
||||
__slots__ = ['first','last','end','continuation','chat_data','remaining',
|
||||
'done','is_last','during_split']
|
||||
|
||||
def __init__(self, first = 0, last = 0, end = 0,
|
||||
continuation = '', chat_data = [], is_last = False,
|
||||
during_split = False):
|
||||
self.first = first
|
||||
self.last = last
|
||||
self.end = end
|
||||
self.continuation = continuation
|
||||
self.chat_data = chat_data
|
||||
self.done = False
|
||||
self.remaining = self.end - self.last
|
||||
self.is_last = is_last
|
||||
self.during_split = during_split
|
||||
@@ -1,153 +0,0 @@
|
||||
from . import parser
|
||||
|
||||
def check_duplicate(chatdata):
|
||||
max_range = len(chatdata)-1
|
||||
tbl_offset = [None] * max_range
|
||||
tbl_id = [None] * max_range
|
||||
tbl_type = [None] * max_range
|
||||
|
||||
def create_table(chatdata, max_range):
|
||||
for i in range(max_range):
|
||||
tbl_offset[i] = parser.get_offset(chatdata[i])
|
||||
tbl_id[i] = parser.get_id(chatdata[i])
|
||||
tbl_type[i] = parser.get_type(chatdata[i])
|
||||
|
||||
def is_duplicate(i, j):
|
||||
return (
|
||||
tbl_offset[i] == tbl_offset[j]
|
||||
and
|
||||
tbl_id[i] == tbl_id[j]
|
||||
and
|
||||
tbl_type[i] == tbl_type[j]
|
||||
)
|
||||
print("creating table...")
|
||||
create_table(chatdata,max_range)
|
||||
print("searching duplicate data...")
|
||||
return [{ "i":{
|
||||
"index" : i, "id" : parser.get_id(chatdata[i]),
|
||||
"offsetTime" : parser.get_offset(chatdata[i]),
|
||||
"type" : parser.get_type(chatdata[i])
|
||||
},
|
||||
"j":{
|
||||
"index" : j, "id" : parser.get_id(chatdata[j]),
|
||||
"offsetTime" : parser.get_offset(chatdata[j]),
|
||||
"type" : parser.get_type(chatdata[j])
|
||||
}
|
||||
}
|
||||
for i in range(max_range) for j in range(i+1,max_range)
|
||||
if is_duplicate(i,j)]
|
||||
|
||||
|
||||
def check_duplicate_offset(chatdata):
|
||||
max_range = len(chatdata)
|
||||
tbl_offset = [None] * max_range
|
||||
tbl_id = [None] * max_range
|
||||
tbl_type = [None] * max_range
|
||||
|
||||
def create_table(chatdata, max_range):
|
||||
for i in range(max_range):
|
||||
tbl_offset[i] = parser.get_offset(chatdata[i])
|
||||
tbl_id[i] = parser.get_id(chatdata[i])
|
||||
tbl_type[i] = parser.get_type(chatdata[i])
|
||||
|
||||
def is_duplicate(i, j):
|
||||
return (
|
||||
tbl_offset[i] == tbl_offset[j]
|
||||
and
|
||||
tbl_id[i] == tbl_id[j]
|
||||
)
|
||||
|
||||
print("creating table...")
|
||||
create_table(chatdata,max_range)
|
||||
print("searching duplicate data...")
|
||||
|
||||
return [{
|
||||
"index" : i, "id" : tbl_id[i],
|
||||
"offsetTime" : tbl_offset[i],
|
||||
"type:" : tbl_type[i]
|
||||
}
|
||||
for i in range(max_range-1)
|
||||
if is_duplicate(i,i+1)]
|
||||
|
||||
def remove_duplicate_head(blocks):
|
||||
if len(blocks) == 1 : return blocks
|
||||
|
||||
def is_duplicate_head(index):
|
||||
|
||||
if len(blocks[index].chat_data) == 0:
|
||||
return True
|
||||
elif len(blocks[index+1].chat_data) == 0:
|
||||
return False
|
||||
|
||||
id_0 = parser.get_id(blocks[index].chat_data[0])
|
||||
id_1 = parser.get_id(blocks[index+1].chat_data[0])
|
||||
type_0 = parser.get_type(blocks[index].chat_data[0])
|
||||
type_1 = parser.get_type(blocks[index+1].chat_data[0])
|
||||
return (
|
||||
blocks[index].first == blocks[index+1].first
|
||||
and
|
||||
id_0 == id_1
|
||||
and
|
||||
type_0 == type_1
|
||||
)
|
||||
ret = [blocks[i] for i in range(len(blocks)-1)
|
||||
if (len(blocks[i].chat_data)>0 and
|
||||
not is_duplicate_head(i) )]
|
||||
ret.append(blocks[-1])
|
||||
return ret
|
||||
|
||||
def remove_duplicate_tail(blocks):
|
||||
if len(blocks) == 1 : return blocks
|
||||
|
||||
def is_duplicate_tail(index):
|
||||
if len(blocks[index].chat_data) == 0:
|
||||
return True
|
||||
elif len(blocks[index-1].chat_data) == 0:
|
||||
return False
|
||||
id_0 = parser.get_id(blocks[index-1].chat_data[-1])
|
||||
id_1 = parser.get_id(blocks[index].chat_data[-1])
|
||||
type_0 = parser.get_type(blocks[index-1].chat_data[-1])
|
||||
type_1 = parser.get_type(blocks[index].chat_data[-1])
|
||||
return (
|
||||
blocks[index-1].last == blocks[index].last
|
||||
and
|
||||
id_0 == id_1
|
||||
and
|
||||
type_0 == type_1
|
||||
)
|
||||
|
||||
ret = [blocks[i] for i in range(0,len(blocks))
|
||||
if i == 0 or not is_duplicate_tail(i) ]
|
||||
return ret
|
||||
|
||||
def remove_overlap(blocks):
|
||||
"""
|
||||
Fix overlapped blocks after ready_blocks().
|
||||
Align the last offset of each block to the first offset
|
||||
of next block (equals `end` offset of each block).
|
||||
"""
|
||||
if len(blocks) == 1 : return blocks
|
||||
|
||||
for block in blocks:
|
||||
if block.is_last:
|
||||
break
|
||||
if len(block.chat_data)==0:
|
||||
continue
|
||||
block_end = block.end
|
||||
if block.last >= block_end:
|
||||
for line in reversed(block.chat_data):
|
||||
if parser.get_offset(line) < block_end:
|
||||
break
|
||||
block.chat_data.pop()
|
||||
block.last = parser.get_offset(line)
|
||||
block.remaining=0
|
||||
block.done=True
|
||||
block.continuation = None
|
||||
return blocks
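In other words, once a block's last fetched offset has run past its `end`, the trailing lines are popped off so that only the next block (which starts at that `end`) keeps them. A toy illustration with plain dicts standing in for Block objects:

```python
# Toy illustration of the overlap trimming: drop trailing items whose offset
# has crossed into the next block's range. Dicts stand in for Block objects.
block = {'end': 300,
         'chat_data': [{'offset': 120}, {'offset': 250}, {'offset': 310}, {'offset': 340}]}

while block['chat_data'] and block['chat_data'][-1]['offset'] >= block['end']:
    block['chat_data'].pop()       # 340 and 310 belong to the next block
print(block['chat_data'])          # [{'offset': 120}, {'offset': 250}]
```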
|
||||
|
||||
|
||||
|
||||
def _dump(blocks):
|
||||
print(f"---------- first last end---")
|
||||
for i,block in enumerate(blocks):
|
||||
print(f"block[{i:3}] {block.first:>10} {block.last:>10} {block.end:>10}")
|
||||
@@ -1,91 +0,0 @@
|
||||
from . import asyncdl
|
||||
from . import duplcheck
|
||||
from . import parser
|
||||
from .. videoinfo import VideoInfo
|
||||
from ... import config
|
||||
from ... exceptions import InvalidVideoIdException
|
||||
|
||||
logger = config.logger(__name__)
|
||||
headers=config.headers
|
||||
|
||||
class Extractor:
|
||||
def __init__(self, video_id, div = 1, callback = None, processor = None):
|
||||
if not isinstance(div ,int) or div < 1:
|
||||
raise ValueError('div must be positive integer.')
|
||||
elif div > 10:
|
||||
div = 10
|
||||
self.video_id = video_id
|
||||
self.div = div
|
||||
self.callback = callback
|
||||
self.processor = processor
|
||||
self.duration = self._get_duration_of_video(video_id)
|
||||
self.blocks = []
|
||||
|
||||
def _get_duration_of_video(self, video_id):
|
||||
duration = 0
|
||||
try:
|
||||
duration = VideoInfo(video_id).get_duration()
|
||||
except InvalidVideoIdException:
|
||||
raise
|
||||
return duration
|
||||
|
||||
def _ready_blocks(self):
|
||||
blocks = asyncdl.ready_blocks(
|
||||
self.video_id, self.duration, self.div, self.callback)
|
||||
self.blocks = [block for block in blocks if block]
|
||||
return self
|
||||
|
||||
def _remove_duplicate_head(self):
|
||||
self.blocks = duplcheck.remove_duplicate_head(self.blocks)
|
||||
return self
|
||||
|
||||
def _set_block_end(self):
|
||||
for i in range(len(self.blocks)-1):
|
||||
self.blocks[i].end = self.blocks[i+1].first
|
||||
self.blocks[-1].end = self.duration*1000
|
||||
self.blocks[-1].is_last =True
|
||||
return self
|
||||
|
||||
def _remove_overlap(self):
|
||||
self.blocks = duplcheck.remove_overlap(self.blocks)
|
||||
return self
|
||||
|
||||
def _download_blocks(self):
|
||||
asyncdl.fetch_patch(self.callback, self.blocks, self.video_id)
|
||||
return self
|
||||
|
||||
def _remove_duplicate_tail(self):
|
||||
self.blocks = duplcheck.remove_duplicate_tail(self.blocks)
|
||||
return self
|
||||
|
||||
def _combine(self):
|
||||
ret = []
|
||||
for block in self.blocks:
|
||||
ret.extend(block.chat_data)
|
||||
return ret
|
||||
|
||||
def _execute_extract_operations(self):
return (
self._ready_blocks()
._remove_duplicate_head()
._set_block_end()
._remove_overlap()
._download_blocks()
._remove_duplicate_tail()
._combine()
)

def extract(self):
if self.duration == 0:
print("video is not archived.")
return []
data = self._execute_extract_operations()
if self.processor is None:
return data
return self.processor.process(
[{'video_id':None,'timeout':1,'chatdata' : (action
["replayChatItemAction"]["actions"][0] for action in data)}]
)

def cancel(self):
asyncdl.cancel()
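For reference, a hypothetical usage of the Extractor class shown above (which this changeset removes); the import path is an assumption based on the relative imports in the file, and the video id is a placeholder:

```python
# Hypothetical usage sketch; module path and video id are placeholders.
from pytchat.tool.extract.extractor import Extractor  # assumed location of this file

ex = Extractor("0123456789A", div=4)   # any archived stream's 11-character id
chatdata = ex.extract()                # runs the chained block pipeline above
print(len(chatdata), "chat actions fetched")
```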
|
||||
@@ -1,54 +0,0 @@
|
||||
import json
|
||||
from ... import config
|
||||
from ... exceptions import (
|
||||
ResponseContextError,
|
||||
NoContentsException,
|
||||
NoContinuationsException )
|
||||
|
||||
logger = config.logger(__name__)
|
||||
|
||||
def parse(jsn):
|
||||
"""
|
||||
Parse replay chat data.
|
||||
Parameter:
|
||||
----------
|
||||
jsn : dict
|
||||
JSON of replay chat data.
|
||||
Returns:
|
||||
------
|
||||
continuation : str
|
||||
actions : list
|
||||
|
||||
"""
|
||||
if jsn is None:
|
||||
raise ValueError("parameter JSON is None")
|
||||
if jsn['response']['responseContext'].get('errors'):
|
||||
raise ResponseContextError(
|
||||
'video_id is invalid or private/deleted.')
|
||||
contents=jsn['response'].get('continuationContents')
|
||||
if contents is None:
|
||||
raise NoContentsException('No chat data.')
|
||||
|
||||
cont = contents['liveChatContinuation']['continuations'][0]
|
||||
if cont is None:
|
||||
raise NoContinuationsException('No Continuation')
|
||||
metadata = cont.get('liveChatReplayContinuationData')
|
||||
if metadata:
|
||||
continuation = metadata.get("continuation")
|
||||
actions = contents['liveChatContinuation'].get('actions')
|
||||
return continuation, actions
|
||||
return None, []
|
||||
|
||||
|
||||
def get_offset(item):
|
||||
return int(item['replayChatItemAction']["videoOffsetTimeMsec"])
|
||||
|
||||
def get_id(item):
|
||||
return list((list(item['replayChatItemAction']["actions"][0].values()
|
||||
)[0])['item'].values())[0].get('id')
|
||||
|
||||
def get_type(item):
|
||||
return list((list(item['replayChatItemAction']["actions"][0].values()
|
||||
)[0])['item'].keys())[0]
|
||||
|
||||
|
||||
@@ -1,54 +0,0 @@
|
||||
from . import parser
|
||||
from . block import Block
|
||||
from typing import NamedTuple
|
||||
|
||||
class Patch(NamedTuple):
|
||||
"""
|
||||
Patch represents chunk of chat data
|
||||
which is fetched by asyncdl.fetch_patch._fetch().
|
||||
"""
|
||||
chats : list = []
|
||||
continuation : str = None
|
||||
first : int = None
|
||||
last : int = None
|
||||
|
||||
def fill(block:Block, patch:Patch):
|
||||
block_end = block.end
|
||||
if patch.last < block_end or block.is_last:
|
||||
set_patch(block, patch)
|
||||
return
|
||||
for line in reversed(patch.chats):
|
||||
line_offset = parser.get_offset(line)
|
||||
if line_offset < block_end:
|
||||
break
|
||||
patch.chats.pop()
|
||||
set_patch(block, patch._replace(
|
||||
continuation = None,
|
||||
last = line_offset
|
||||
)
|
||||
)
|
||||
block.remaining=0
|
||||
block.done=True
|
||||
|
||||
|
||||
def split(parent_block:Block, child_block:Block, patch:Patch):
|
||||
parent_block.during_split = False
|
||||
if patch.first <= parent_block.last:
|
||||
''' When patch overlaps with parent_block,
|
||||
discard this block. '''
|
||||
child_block.continuation = None
|
||||
''' Leave child_block.during_split == True
|
||||
to exclude from during_split sequence. '''
|
||||
return
|
||||
child_block.during_split = False
|
||||
child_block.first = patch.first
|
||||
parent_block.end = patch.first
|
||||
fill(child_block, patch)
|
||||
|
||||
|
||||
def set_patch(block:Block, patch:Patch):
|
||||
block.continuation = patch.continuation
|
||||
block.chat_data.extend(patch.chats)
|
||||
block.last = patch.last
|
||||
block.remaining = block.end-block.last
|
||||
|
||||
@@ -1,90 +0,0 @@
|
||||
from . import parser
|
||||
from . block import Block
|
||||
from . patch import Patch, fill, split
|
||||
from ... paramgen import arcparam
|
||||
|
||||
class ExtractWorker:
|
||||
"""
|
||||
ExtractWorker associates a download session with a block.
|
||||
|
||||
When the worker finishes fetching, the block
|
||||
being fetched is splitted and assigned the free worker.
|
||||
|
||||
Parameter
|
||||
----------
|
||||
fetch : func :
|
||||
extract function of asyncdl
|
||||
|
||||
block : Block :
|
||||
Block object that includes chat_data
|
||||
|
||||
blocks : list :
|
||||
List of Block(s)
|
||||
|
||||
video_id : str :
|
||||
|
||||
parent_block : Block :
|
||||
the block from which current block is splitted
|
||||
"""
|
||||
__slots__ = ['block', 'fetch', 'blocks', 'video_id', 'parent_block']
|
||||
|
||||
def __init__(self, fetch, block, blocks, video_id ):
|
||||
self.block = block
|
||||
self.fetch = fetch
|
||||
self.blocks = blocks
|
||||
self.video_id = video_id
|
||||
self.parent_block = None
|
||||
|
||||
async def run(self, session):
|
||||
while self.block.continuation:
|
||||
patch = await self.fetch(
|
||||
self.block.continuation, session)
|
||||
if patch.continuation is None:
|
||||
"""TODO : make the worker assigned to the last block
|
||||
to work more than twice as possible.
|
||||
"""
|
||||
break
|
||||
if self.parent_block:
|
||||
split(self.parent_block, self.block, patch)
|
||||
self.parent_block = None
|
||||
else:
|
||||
fill(self.block, patch)
|
||||
if self.block.continuation is None:
|
||||
"""finished fetching this block """
|
||||
self.block.done = True
|
||||
self.block = _search_new_block(self)
|
||||
|
||||
def _search_new_block(worker) -> Block:
|
||||
index, undone_block = _get_undone_block(worker.blocks)
|
||||
if undone_block is None:
|
||||
return Block(continuation = None)
|
||||
mean = (undone_block.last + undone_block.end)/2
|
||||
continuation = arcparam.getparam(worker.video_id, seektime = mean/1000)
|
||||
worker.parent_block = undone_block
|
||||
worker.parent_block.during_split = True
|
||||
new_block = Block(
|
||||
end = undone_block.end,
|
||||
chat_data = [],
|
||||
continuation = continuation,
|
||||
during_split = True,
|
||||
is_last = worker.parent_block.is_last)
|
||||
'''swap last block'''
|
||||
if worker.parent_block.is_last:
|
||||
worker.parent_block.is_last = False
|
||||
worker.blocks.insert(index+1, new_block)
|
||||
return new_block
|
||||
|
||||
def _get_undone_block(blocks) -> (int, Block):
|
||||
min_interval_ms = 120000
|
||||
max_remaining = 0
|
||||
undone_block = None
|
||||
index_undone_block = 0
|
||||
for index, block in enumerate(blocks):
|
||||
if block.done or block.during_split:
|
||||
continue
|
||||
remaining = block.remaining
|
||||
if remaining > max_remaining and remaining > min_interval_ms:
|
||||
index_undone_block = index
|
||||
undone_block = block
|
||||
max_remaining = remaining
|
||||
return index_undone_block, undone_block
|
||||
@@ -1,141 +0,0 @@
|
||||
|
||||
import aiohttp
|
||||
import asyncio
|
||||
import json
|
||||
from . import parser
|
||||
from . block import Block
|
||||
from . worker import ExtractWorker
|
||||
from . patch import Patch
|
||||
from ... import config
|
||||
from ... paramgen import arcparam_mining as arcparam
|
||||
from concurrent.futures import CancelledError
|
||||
from urllib.parse import quote
|
||||
|
||||
headers = config.headers
|
||||
REPLAY_URL = "https://www.youtube.com/live_chat_replay?continuation="
|
||||
INTERVAL = 1
|
||||
def _split(start, end, count, min_interval_sec = 120):
|
||||
"""
|
||||
Split section from `start` to `end` into `count` pieces,
|
||||
and returns the beginning of each piece.
|
||||
The `count` is adjusted so that the length of each piece
|
||||
is no smaller than `min_interval`.
|
||||
|
||||
Returns:
|
||||
--------
|
||||
List of the offset of each block's first chat data.
|
||||
"""
|
||||
|
||||
if not (isinstance(start,int) or isinstance(start,float)) or \
|
||||
not (isinstance(end,int) or isinstance(end,float)):
|
||||
raise ValueError("start/end must be int or float")
|
||||
if not isinstance(count,int):
|
||||
raise ValueError("count must be int")
|
||||
if start>end:
|
||||
raise ValueError("end must be equal to or greater than start.")
|
||||
if count<1:
|
||||
raise ValueError("count must be equal to or greater than 1.")
|
||||
if (end-start)/count < min_interval_sec:
|
||||
count = int((end-start)/min_interval_sec)
|
||||
if count == 0 : count = 1
|
||||
interval= (end-start)/count
|
||||
|
||||
if count == 1:
|
||||
return [start]
|
||||
return sorted( list(set( [int(start + interval*j)
|
||||
for j in range(count) ])))
|
||||
|
||||
def ready_blocks(video_id, duration, div, callback):
|
||||
if div <= 0: raise ValueError
|
||||
|
||||
async def _get_blocks( video_id, duration, div, callback):
|
||||
async with aiohttp.ClientSession() as session:
|
||||
tasks = [_create_block(session, video_id, seektime, callback)
|
||||
for seektime in _split(0, duration, div)]
|
||||
return await asyncio.gather(*tasks)
|
||||
|
||||
|
||||
|
||||
async def _create_block(session, video_id, seektime, callback):
|
||||
continuation = arcparam.getparam(video_id, seektime = seektime)
|
||||
url=(f"{REPLAY_URL}{quote(continuation)}&playerOffsetMs="
|
||||
f"{int(seektime*1000)}&hidden=false&pbj=1")
|
||||
async with session.get(url, headers = headers) as resp:
|
||||
chat_json = await resp.text()
|
||||
if chat_json is None:
|
||||
return
|
||||
continuation, actions = parser.parse(json.loads(chat_json)[1])
|
||||
first = seektime
|
||||
seektime += INTERVAL
|
||||
if callback:
|
||||
callback(actions, INTERVAL)
|
||||
return Block(
|
||||
continuation = continuation,
|
||||
chat_data = actions,
|
||||
first = first,
|
||||
last = seektime,
|
||||
seektime = seektime
|
||||
)
|
||||
"""
|
||||
fetch initial blocks.
|
||||
"""
|
||||
loop = asyncio.get_event_loop()
|
||||
blocks = loop.run_until_complete(
|
||||
_get_blocks(video_id, duration, div, callback))
|
||||
return blocks
|
||||
|
||||
def fetch_patch(callback, blocks, video_id):
|
||||
|
||||
async def _allocate_workers():
|
||||
workers = [
|
||||
ExtractWorker(
|
||||
fetch = _fetch, block = block,
|
||||
blocks = blocks, video_id = video_id
|
||||
)
|
||||
for block in blocks
|
||||
]
|
||||
async with aiohttp.ClientSession() as session:
|
||||
tasks = [worker.run(session) for worker in workers]
|
||||
return await asyncio.gather(*tasks)
|
||||
|
||||
async def _fetch(seektime,session) -> Patch:
|
||||
continuation = arcparam.getparam(video_id, seektime = seektime)
|
||||
url=(f"{REPLAY_URL}{quote(continuation)}&playerOffsetMs="
|
||||
f"{int(seektime*1000)}&hidden=false&pbj=1")
|
||||
async with session.get(url,headers = config.headers) as resp:
|
||||
chat_json = await resp.text()
|
||||
actions = []
|
||||
try:
|
||||
if chat_json is None:
|
||||
return Patch()
|
||||
continuation, actions = parser.parse(json.loads(chat_json)[1])
|
||||
except json.JSONDecodeError:
|
||||
pass
|
||||
if callback:
|
||||
callback(actions, INTERVAL)
|
||||
return Patch(chats = actions, continuation = continuation,
|
||||
seektime = seektime, last = seektime)
|
||||
"""
|
||||
allocate workers and assign blocks.
|
||||
"""
|
||||
loop = asyncio.get_event_loop()
|
||||
try:
|
||||
loop.run_until_complete(_allocate_workers())
|
||||
except CancelledError:
|
||||
pass
|
||||
|
||||
async def _shutdown():
|
||||
print("\nshutdown...")
|
||||
tasks = [t for t in asyncio.all_tasks()
|
||||
if t is not asyncio.current_task()]
|
||||
for task in tasks:
|
||||
task.cancel()
|
||||
try:
|
||||
await task
|
||||
except asyncio.CancelledError:
|
||||
pass
|
||||
|
||||
def cancel():
|
||||
loop = asyncio.get_event_loop()
|
||||
loop.create_task(_shutdown())
|
||||
|
||||
@@ -1,62 +0,0 @@
|
||||
from . import parser
|
||||
class Block:
|
||||
"""Block object represents something like a box
|
||||
to join chunk of chatdata.
|
||||
|
||||
Parameter:
|
||||
---------
|
||||
first : int :
|
||||
videoOffsetTimeMs of the first chat_data
|
||||
(chat_data[0])
|
||||
|
||||
last : int :
|
||||
videoOffsetTimeMs of the last chat_data.
|
||||
(chat_data[-1])
|
||||
|
||||
this value increases as fetching chatdata progresses.
|
||||
|
||||
end : int :
|
||||
target videoOffsetTimeMs of last chat data for extract,
|
||||
equals to first videoOffsetTimeMs of next block.
|
||||
when extract worker reaches this offset, stop fetching.
|
||||
|
||||
continuation : str :
|
||||
continuation param of last chat data.
|
||||
|
||||
chat_data : list
|
||||
|
||||
done : bool :
|
||||
whether this block has been fetched.
|
||||
|
||||
remaining : int :
|
||||
remaining data to extract.
|
||||
equals end - last.
|
||||
|
||||
is_last : bool :
|
||||
whether this block is the last one in blocklist.
|
||||
|
||||
during_split : bool :
|
||||
whether this block is in the process of during_split.
|
||||
while True, this block is excluded from duplicate split procedure.
|
||||
|
||||
seektime : float :
|
||||
the last position of this block(seconds) already fetched.
|
||||
"""
|
||||
|
||||
__slots__ = ['first','last','end','continuation','chat_data','remaining',
|
||||
'done','is_last','during_split','seektime']
|
||||
|
||||
def __init__(self, first = 0, last = 0, end = 0,
|
||||
continuation = '', chat_data = [], is_last = False,
|
||||
during_split = False, seektime = None):
|
||||
self.first = first
|
||||
self.last = last
|
||||
self.end = end
|
||||
self.continuation = continuation
|
||||
self.chat_data = chat_data
|
||||
self.done = False
|
||||
self.remaining = self.end - self.last
|
||||
self.is_last = is_last
|
||||
self.during_split = during_split
|
||||
self.seektime = seektime
|
||||
|
||||
@@ -1,67 +0,0 @@
|
||||
import json
|
||||
from ... import config
|
||||
from ... exceptions import (
|
||||
ResponseContextError,
|
||||
NoContentsException,
|
||||
NoContinuationsException )
|
||||
|
||||
logger = config.logger(__name__)
|
||||
|
||||
def parse(jsn):
|
||||
"""
|
||||
Parse replay chat data.
|
||||
Parameter:
|
||||
----------
|
||||
jsn : dict
|
||||
JSON of replay chat data.
|
||||
Returns:
|
||||
------
|
||||
continuation : str
|
||||
actions : list
|
||||
|
||||
"""
|
||||
if jsn is None:
|
||||
raise ValueError("parameter JSON is None")
|
||||
if jsn['response']['responseContext'].get('errors'):
|
||||
raise ResponseContextError(
|
||||
'video_id is invalid or private/deleted.')
|
||||
contents=jsn["response"].get('continuationContents')
|
||||
if contents is None:
|
||||
raise NoContentsException('No chat data.')
|
||||
|
||||
cont = contents['liveChatContinuation']['continuations'][0]
|
||||
if cont is None:
|
||||
raise NoContinuationsException('No Continuation')
|
||||
metadata = cont.get('liveChatReplayContinuationData')
|
||||
if metadata:
|
||||
continuation = metadata.get("continuation")
|
||||
actions = contents['liveChatContinuation'].get('actions')
|
||||
if continuation:
|
||||
return continuation, [action["replayChatItemAction"]["actions"][0]
|
||||
for action in actions
|
||||
if list(action['replayChatItemAction']["actions"][0].values()
|
||||
)[0]['item'].get("liveChatPaidMessageRenderer")
|
||||
or list(action['replayChatItemAction']["actions"][0].values()
|
||||
)[0]['item'].get("liveChatPaidStickerRenderer")
|
||||
]
|
||||
return None, []
|
||||
|
||||
|
||||
def get_offset(item):
|
||||
return int(item['replayChatItemAction']["videoOffsetTimeMsec"])
|
||||
|
||||
def get_id(item):
|
||||
return list((list(item['replayChatItemAction']["actions"][0].values()
|
||||
)[0])['item'].values())[0].get('id')
|
||||
|
||||
def get_type(item):
|
||||
return list((list(item['replayChatItemAction']["actions"][0].values()
|
||||
)[0])['item'].keys())[0]
|
||||
import re
|
||||
_REGEX_YTINIT = re.compile("window\\[\"ytInitialData\"\\]\\s*=\\s*({.+?});\\s+")
|
||||
def extract(text):
|
||||
|
||||
match = re.findall(_REGEX_YTINIT, str(text))
|
||||
if match:
|
||||
return match[0]
|
||||
return None
|
||||
@@ -1,27 +0,0 @@
|
||||
from . import parser
|
||||
from . block import Block
|
||||
from typing import NamedTuple
|
||||
|
||||
class Patch(NamedTuple):
|
||||
"""
|
||||
Patch represents chunk of chat data
|
||||
which is fetched by asyncdl.fetch_patch._fetch().
|
||||
"""
|
||||
chats : list = []
|
||||
continuation : str = None
|
||||
seektime : float = None
|
||||
first : int = None
|
||||
last : int = None
|
||||
|
||||
def fill(block:Block, patch:Patch):
|
||||
if patch.last < block.end:
|
||||
set_patch(block, patch)
|
||||
return
|
||||
block.continuation = None
|
||||
|
||||
def set_patch(block:Block, patch:Patch):
|
||||
block.continuation = patch.continuation
|
||||
block.chat_data.extend(patch.chats)
|
||||
block.last = patch.seektime
|
||||
block.seektime = patch.seektime
|
||||
|
||||
@@ -1,72 +0,0 @@
|
||||
from . import asyncdl
|
||||
from . import parser
|
||||
from .. videoinfo import VideoInfo
|
||||
from ... import config
|
||||
from ... exceptions import InvalidVideoIdException
|
||||
logger = config.logger(__name__)
|
||||
headers=config.headers
|
||||
|
||||
class SuperChatMiner:
|
||||
def __init__(self, video_id, duration, div, callback):
|
||||
if not isinstance(div ,int) or div < 1:
|
||||
raise ValueError('div must be positive integer.')
|
||||
elif div > 10:
|
||||
div = 10
|
||||
if not isinstance(duration ,int) or duration < 1:
|
||||
raise ValueError('duration must be positive integer.')
|
||||
self.video_id = video_id
|
||||
self.duration = duration
|
||||
self.div = div
|
||||
self.callback = callback
|
||||
self.blocks = []
|
||||
|
||||
def _ready_blocks(self):
|
||||
blocks = asyncdl.ready_blocks(
|
||||
self.video_id, self.duration, self.div, self.callback)
|
||||
self.blocks = [block for block in blocks if block is not None]
|
||||
return self
|
||||
|
||||
def _set_block_end(self):
|
||||
for i in range(len(self.blocks)-1):
|
||||
self.blocks[i].end = self.blocks[i+1].first
|
||||
self.blocks[-1].end = self.duration
|
||||
self.blocks[-1].is_last =True
|
||||
return self
|
||||
|
||||
def _download_blocks(self):
|
||||
asyncdl.fetch_patch(self.callback, self.blocks, self.video_id)
|
||||
return self
|
||||
|
||||
def _combine(self):
|
||||
ret = []
|
||||
for block in self.blocks:
|
||||
ret.extend(block.chat_data)
|
||||
return ret
|
||||
|
||||
def extract(self):
|
||||
return (
|
||||
self._ready_blocks()
|
||||
._set_block_end()
|
||||
._download_blocks()
|
||||
._combine()
|
||||
)
|
||||
|
||||
def extract(video_id, div = 1, callback = None, processor = None):
|
||||
duration = 0
|
||||
try:
|
||||
duration = VideoInfo(video_id).get_duration()
|
||||
except InvalidVideoIdException:
|
||||
raise
|
||||
if duration == 0:
|
||||
print("video is live.")
|
||||
return []
|
||||
data = SuperChatMiner(video_id, duration, div, callback).extract()
|
||||
if processor is None:
|
||||
return data
|
||||
return processor.process(
|
||||
[{'video_id':None,'timeout':1,'chatdata' : (action
|
||||
for action in data)}]
|
||||
)
|
||||
|
||||
def cancel():
|
||||
asyncdl.cancel()
|
||||
@@ -1,45 +0,0 @@
|
||||
from . import parser
|
||||
from . block import Block
|
||||
from . patch import Patch, fill
|
||||
from ... paramgen import arcparam
|
||||
INTERVAL = 1
|
||||
class ExtractWorker:
|
||||
"""
|
||||
ExtractWorker associates a download session with a block.
|
||||
|
||||
When the worker finishes fetching, the block
|
||||
being fetched is splitted and assigned the free worker.
|
||||
|
||||
Parameter
|
||||
----------
|
||||
fetch : func :
|
||||
extract function of asyncdl
|
||||
|
||||
block : Block :
|
||||
Block object that includes chat_data
|
||||
|
||||
blocks : list :
|
||||
List of Block(s)
|
||||
|
||||
video_id : str :
|
||||
|
||||
parent_block : Block :
|
||||
the block from which current block is splitted
|
||||
"""
|
||||
__slots__ = ['block', 'fetch', 'blocks', 'video_id', 'parent_block']
|
||||
def __init__(self, fetch, block, blocks, video_id ):
|
||||
self.block:Block = block
|
||||
self.fetch = fetch
|
||||
self.blocks:list = blocks
|
||||
self.video_id:str = video_id
|
||||
self.parent_block:Block = None
|
||||
|
||||
async def run(self, session):
|
||||
while self.block.continuation:
|
||||
patch = await self.fetch(
|
||||
self.block.seektime, session)
|
||||
fill(self.block, patch)
|
||||
self.block.seektime += INTERVAL
|
||||
self.block.done = True
|
||||
|
||||
|
||||
@@ -1,153 +0,0 @@
|
||||
import json
|
||||
import re
|
||||
import requests
|
||||
from .. import config
|
||||
from .. import util
|
||||
from ..exceptions import InvalidVideoIdException
|
||||
|
||||
headers = config.headers
|
||||
|
||||
pattern = re.compile(r"yt\.setConfig\({'PLAYER_CONFIG': ({.*})}\);")
|
||||
|
||||
item_channel_id =[
|
||||
"videoDetails",
|
||||
"embeddedPlayerOverlayVideoDetailsRenderer",
|
||||
"channelThumbnailEndpoint",
|
||||
"channelThumbnailEndpoint",
|
||||
"urlEndpoint",
|
||||
"urlEndpoint",
|
||||
"url"
|
||||
]
|
||||
|
||||
item_renderer = [
|
||||
"embedPreview",
|
||||
"thumbnailPreviewRenderer"
|
||||
]
|
||||
|
||||
item_response = [
|
||||
"args",
|
||||
"embedded_player_response"
|
||||
]
|
||||
|
||||
item_author_image =[
|
||||
"videoDetails",
|
||||
"embeddedPlayerOverlayVideoDetailsRenderer",
|
||||
"channelThumbnail",
|
||||
"thumbnails",
|
||||
0,
|
||||
"url"
|
||||
]
|
||||
|
||||
item_thumbnail = [
|
||||
"defaultThumbnail",
|
||||
"thumbnails",
|
||||
2,
|
||||
"url"
|
||||
]
|
||||
|
||||
item_channel_name = [
|
||||
"videoDetails",
|
||||
"embeddedPlayerOverlayVideoDetailsRenderer",
|
||||
"expandedRenderer",
|
||||
"embeddedPlayerOverlayVideoDetailsExpandedRenderer",
|
||||
"title",
|
||||
"runs",
|
||||
0,
|
||||
"text"
|
||||
]
|
||||
|
||||
item_moving_thumbnail = [
|
||||
"movingThumbnail",
|
||||
"thumbnails",
|
||||
0,
|
||||
"url"
|
||||
]
|
||||
|
||||
class VideoInfo:
|
||||
'''
|
||||
VideoInfo object retrieves YouTube video information.
|
||||
|
||||
Parameter
|
||||
---------
|
||||
video_id : str
|
||||
|
||||
Exception
|
||||
---------
|
||||
InvalidVideoIdException :
|
||||
Occurs when video_id does not exist on YouTube.
|
||||
'''
|
||||
def __init__(self, video_id):
|
||||
self.video_id = video_id
|
||||
text = self._get_page_text(video_id)
|
||||
self._parse(text)
|
||||
|
||||
def _get_page_text(self, video_id):
|
||||
url = f"https://www.youtube.com/embed/{video_id}"
|
||||
resp = requests.get(url, headers = headers)
|
||||
resp.raise_for_status()
|
||||
return resp.text
|
||||
|
||||
def _parse(self, text):
|
||||
result = re.search(pattern, text)
|
||||
res= json.loads(result.group(1))
|
||||
response = self._get_item(res, item_response)
|
||||
if response is None:
|
||||
self._check_video_is_private(res.get("args"))
|
||||
self._renderer = self._get_item(json.loads(response), item_renderer)
|
||||
if self._renderer is None:
|
||||
raise InvalidVideoIdException(
|
||||
f"No renderer found in video_id: [{self.video_id}].")
|
||||
|
||||
def _check_video_is_private(self,args):
|
||||
if args and args.get("video_id"):
|
||||
raise InvalidVideoIdException(
|
||||
f"video_id [{self.video_id}] is private or deleted.")
|
||||
raise InvalidVideoIdException(
|
||||
f"video_id [{self.video_id}] is invalid.")
|
||||
|
||||
def _get_item(self, dict_body, items: list):
for item in items:
if dict_body is None:
break
if isinstance(dict_body, dict):
dict_body = dict_body.get(item)
continue
if isinstance(item, int) and \
isinstance(dict_body, list) and \
len(dict_body) > item:
dict_body = dict_body[item]
continue
return None
return dict_body
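The helper above walks a nested response with a path whose entries are either dict keys or list indices. A standalone sketch of the same traversal against a made-up payload:

```python
# Standalone mirror of the _get_item path walk; the payload is made up.
def get_item(dict_body, items):
    for item in items:
        if dict_body is None:
            break
        if isinstance(dict_body, dict):
            dict_body = dict_body.get(item)
            continue
        if isinstance(item, int) and isinstance(dict_body, list) and len(dict_body) > item:
            dict_body = dict_body[item]
            continue
        return None
    return dict_body

payload = {"title": {"runs": [{"text": "stream title"}]}}
print(get_item(payload, ["title", "runs", 0, "text"]))  # -> 'stream title'
print(get_item(payload, ["title", "runs", 5, "text"]))  # -> None (index out of range)
```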
|
||||
|
||||
def get_duration(self):
|
||||
duration_seconds = self._renderer.get("videoDurationSeconds")
|
||||
if duration_seconds:
|
||||
'''Fetched value is string, so cast to integer.'''
|
||||
return int(duration_seconds)
|
||||
'''When key is not found, explicitly returns None.'''
|
||||
return None
|
||||
|
||||
def get_title(self):
|
||||
if self._renderer.get("title"):
|
||||
return [''.join(run["text"])
|
||||
for run in self._renderer["title"]["runs"]][0]
|
||||
return None
|
||||
|
||||
def get_channel_id(self):
|
||||
channel_url = self._get_item(self._renderer, item_channel_id)
|
||||
if channel_url:
|
||||
return channel_url[9:]
|
||||
return None
|
||||
|
||||
def get_author_image(self):
|
||||
return self._get_item(self._renderer, item_author_image)
|
||||
|
||||
def get_thumbnail(self):
|
||||
return self._get_item(self._renderer, item_thumbnail)
|
||||
|
||||
def get_channel_name(self):
|
||||
return self._get_item(self._renderer, item_channel_name)
|
||||
|
||||
def get_moving_thumbnail(self):
|
||||
return self._get_item(self._renderer, item_moving_thumbnail)
|
||||
@@ -1,15 +1,147 @@
|
||||
import requests,json,datetime
import datetime
import httpx
import json
import os
import re
from urllib.parse import quote
from .. import config
from .. exceptions import InvalidVideoIdException

PATTERN = re.compile(r"(.*)\(([0-9]+)\)$")

PATTERN_YTURL = re.compile(r"((?<=(v|V)/)|(?<=be/)|(?<=(\?|\&)v=)|(?<=embed/))([\w-]+)")

PATTERN_CHANNEL = re.compile(r"\\\"channelId\\\":\\\"(.{24})\\\"")

PATTERN_M_CHANNEL = re.compile(r"\"channelId\":\"(.{24})\"")

YT_VIDEO_ID_LENGTH = 11

CLIENT_VERSION = ''.join(("2.", (datetime.datetime.today() - datetime.timedelta(days=1)).strftime("%Y%m%d"), ".01.00"))
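CLIENT_VERSION is stitched together from yesterday's date to mimic the web client's version string. A quick check of the resulting format:

```python
# Quick check of the CLIENT_VERSION format; the printed value depends on today's date.
import datetime

yesterday = datetime.datetime.today() - datetime.timedelta(days=1)
client_version = ''.join(("2.", yesterday.strftime("%Y%m%d"), ".01.00"))
print(client_version)  # e.g. '2.20210101.01.00'
```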
|
||||
|
||||
UA = config.headers["user-agent"]
|
||||
|
||||
|
||||
def extract(url):
|
||||
_session = requests.Session()
|
||||
_session = httpx.Client(http2=True)
|
||||
html = _session.get(url, headers=config.headers)
|
||||
with open(str(datetime.datetime.now().strftime('%Y-%m-%d %H-%M-%S')
|
||||
)+'test.json',mode ='w',encoding='utf-8') as f:
|
||||
json.dump(html.json(),f,ensure_ascii=False)
|
||||
) + 'test.json', mode='w', encoding='utf-8') as f:
|
||||
json.dump(html.json(), f, ensure_ascii=False)
|
||||
|
||||
|
||||
def save(data,filename,extention):
|
||||
with open(filename+"_"+(datetime.datetime.now().strftime('%Y-%m-%d %H-%M-%S')
|
||||
)+extention,mode ='w',encoding='utf-8') as f:
|
||||
def save(data, filename, extention) -> str:
|
||||
save_filename = filename + "_" + \
|
||||
(datetime.datetime.now().strftime('%Y-%m-%d %H-%M-%S')) + extention
|
||||
with open(save_filename, mode='w', encoding='utf-8') as f:
|
||||
f.writelines(data)
|
||||
return save_filename
|
||||
|
||||
|
||||
def checkpath(filepath):
splitter = os.path.splitext(os.path.basename(filepath))
body = splitter[0]
extention = splitter[1]
newpath = filepath
counter = 1
while os.path.exists(newpath):
match = re.search(PATTERN, body)
if match:
counter = int(match[2]) + 1
num_with_bracket = f'({str(counter)})'
body = f'{match[1]}{num_with_bracket}'
else:
body = f'{body}({str(counter)})'
newpath = os.path.join(os.path.dirname(filepath), body + extention)
return newpath
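checkpath keeps appending or bumping a "(n)" suffix until the name is free, so an existing file is never overwritten. A self-contained re-typing of that loop, exercised against a throwaway directory:

```python
# Standalone re-typing of the suffixing loop so it can be tried in isolation.
import os
import re
import tempfile

PATTERN = re.compile(r"(.*)\(([0-9]+)\)$")

def checkpath(filepath):
    body, extention = os.path.splitext(os.path.basename(filepath))
    newpath = filepath
    counter = 1
    while os.path.exists(newpath):
        match = re.search(PATTERN, body)
        if match:
            counter = int(match[2]) + 1          # bump an existing "(n)" suffix
            body = f'{match[1]}({counter})'
        else:
            body = f'{body}({counter})'          # first collision gets "(1)"
        newpath = os.path.join(os.path.dirname(filepath), body + extention)
    return newpath

with tempfile.TemporaryDirectory() as tmp:
    target = os.path.join(tmp, "chat.tsv")
    open(target, "w").close()                    # simulate an existing archive
    print(checkpath(target))                     # .../chat(1).tsv
```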
|
||||
|
||||
|
||||
def get_param(continuation, replay=False, offsetms: int = 0, dat=''):
if offsetms < 0:
offsetms = 0
ret = {
"context": {
"client": {
"visitorData": dat,
"userAgent": UA,
"clientName": "WEB",
"clientVersion": CLIENT_VERSION,
},

},
"continuation": continuation,
}
if replay:
ret.setdefault("currentPlayerState", {
"playerOffsetMs": str(int(offsetms))})
return ret
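A usage sketch of the payload builder; the import path is an assumption based on the relative imports above, and the continuation token is a placeholder:

```python
# Usage sketch; the import path is assumed and the token is a placeholder.
from pytchat.util import get_param

body = get_param("CONTINUATION_TOKEN", replay=True, offsetms=30000)
print(body["continuation"])                      # 'CONTINUATION_TOKEN'
print(body["currentPlayerState"])                # {'playerOffsetMs': '30000'}
print(body["context"]["client"]["clientName"])   # 'WEB'
```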
|
||||
|
||||
|
||||
def extract_video_id(url_or_id: str) -> str:
ret = ''
if '[' in url_or_id:
url_or_id = url_or_id.replace('[', '').replace(']', '')

if type(url_or_id) != str:
raise TypeError(f"{url_or_id}: URL or VideoID must be str, but {type(url_or_id)} is passed.")
if len(url_or_id) == YT_VIDEO_ID_LENGTH:
return url_or_id
match = re.search(PATTERN_YTURL, url_or_id)
if match is None:
raise InvalidVideoIdException(f"Invalid video id: {url_or_id}")
try:
ret = match.group(4)
except IndexError:
raise InvalidVideoIdException(f"Invalid video id: {url_or_id}")

if ret is None or len(ret) != YT_VIDEO_ID_LENGTH:
raise InvalidVideoIdException(f"Invalid video id: {url_or_id}")
return ret
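extract_video_id accepts either a bare 11-character id or a watch/short/embed URL, pulling the id out with PATTERN_YTURL. A usage sketch (import path assumed, ids below are placeholders):

```python
# Usage sketch; import path assumed, the ids below are placeholders.
from pytchat.util import extract_video_id

print(extract_video_id("01234567890"))                                  # bare 11-char id
print(extract_video_id("https://www.youtube.com/watch?v=01234567890"))  # watch URL
print(extract_video_id("https://youtu.be/01234567890"))                 # short URL
# anything that does not yield an 11-character id raises InvalidVideoIdException
```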
|
||||
|
||||
|
||||
def get_channelid(client, video_id):
|
||||
resp = client.get("https://www.youtube.com/embed/{}".format(quote(video_id)), headers=config.headers)
|
||||
match = re.search(PATTERN_CHANNEL, resp.text)
|
||||
try:
|
||||
if match is None:
|
||||
raise IndexError
|
||||
ret = match.group(1)
|
||||
except IndexError:
|
||||
ret = get_channelid_2nd(client, video_id)
|
||||
return ret
|
||||
|
||||
|
||||
def get_channelid_2nd(client, video_id):
|
||||
resp = client.get("https://m.youtube.com/watch?v={}".format(quote(video_id)), headers=config.m_headers)
|
||||
|
||||
match = re.search(PATTERN_M_CHANNEL, resp.text)
|
||||
if match is None:
|
||||
raise InvalidVideoIdException(f"Cannot find channel id for video id:{video_id}. This video id seems to be invalid.")
|
||||
try:
|
||||
ret = match.group(1)
|
||||
except IndexError:
|
||||
raise InvalidVideoIdException(f"Invalid video id: {video_id}")
|
||||
return ret
|
||||
|
||||
|
||||
async def get_channelid_async(client, video_id):
|
||||
resp = await client.get("https://www.youtube.com/embed/{}".format(quote(video_id)), headers=config.headers)
|
||||
match = re.search(PATTERN_CHANNEL, resp.text)
|
||||
try:
|
||||
if match is None:
|
||||
raise IndexError
|
||||
ret = match.group(1)
|
||||
except IndexError:
|
||||
ret = await get_channelid_async_2nd(client, video_id)
|
||||
return ret
|
||||
|
||||
async def get_channelid_async_2nd(client, video_id):
|
||||
resp = await client.get("https://m.youtube.com/watch?v={}".format(quote(video_id)), headers=config.m_headers)
|
||||
match = re.search(PATTERN_M_CHANNEL, resp.text)
|
||||
if match is None:
|
||||
raise InvalidVideoIdException(f"Cannot find channel id for video id:{video_id}. This video id seems to be invalid.")
|
||||
try:
|
||||
ret = match.group(1)
|
||||
except IndexError:
|
||||
raise InvalidVideoIdException(f"Invalid video id: {video_id}")
|
||||
return ret
|
||||
@@ -1,5 +1 @@
|
||||
aiohttp
|
||||
argumentparser
|
||||
pytz
|
||||
requests
|
||||
urllib3
|
||||
httpx[http2]==0.16.1
|
||||
@@ -1,5 +1,2 @@
|
||||
aioresponses
|
||||
mock
|
||||
mocker
|
||||
pytest
|
||||
pytest-mock
|
||||
pytest-httpx
|
||||
|
||||
5
setup.py
5
setup.py
@@ -55,11 +55,6 @@ setup(
|
||||
'License :: OSI Approved :: MIT License',
|
||||
],
|
||||
description="a python library for fetching youtube live chat.",
|
||||
entry_points=
|
||||
'''
|
||||
[console_scripts]
|
||||
pytchat=pytchat.cli:main
|
||||
''',
|
||||
install_requires=_requirements(),
|
||||
keywords='youtube livechat asyncio',
|
||||
license=license,
|
||||
|
||||
@@ -1,28 +0,0 @@
|
||||
import pytest
|
||||
from pytchat.parser.live import Parser
|
||||
import pytchat.config as config
|
||||
import requests, json
|
||||
from pytchat.paramgen import arcparam
|
||||
|
||||
def test_arcparam_0(mocker):
|
||||
param = arcparam.getparam("01234567890",-1)
|
||||
assert param == "op2w0wRyGjxDZzhhRFFvTE1ERXlNelExTmpjNE9UQWFFLXFvM2JrQkRRb0xNREV5TXpRMU5qYzRPVEFnQVElM0QlM0QoADAAOABAAEgEUhwIABAAGAAgACoOc3RhdGljY2hlY2tzdW1AAFgDYAFoAHIECAEQAHgA"
|
||||
|
||||
def test_arcparam_1(mocker):
|
||||
param = arcparam.getparam("01234567890", seektime = 100000)
|
||||
assert param == "op2w0wR3GjxDZzhhRFFvTE1ERXlNelExTmpjNE9UQWFFLXFvM2JrQkRRb0xNREV5TXpRMU5qYzRPVEFnQVElM0QlM0QogNDbw_QCMAA4AEAASANSHAgAEAAYACAAKg5zdGF0aWNjaGVja3N1bUAAWANgAWgAcgQIARAAeAA%3D"
|
||||
|
||||
def test_arcparam_2(mocker):
|
||||
param = arcparam.getparam("SsjCnHOk-Sk")
|
||||
url=f"https://www.youtube.com/live_chat_replay/get_live_chat_replay?continuation={param}&pbj=1"
|
||||
resp = requests.Session().get(url,headers = config.headers)
|
||||
jsn = json.loads(resp.text)
|
||||
parser = Parser(is_replay=True)
|
||||
contents= parser.get_contents(jsn)
|
||||
_ , chatdata = parser.parse(contents)
|
||||
test_id = chatdata[0]["addChatItemAction"]["item"]["liveChatTextMessageRenderer"]["id"]
|
||||
assert test_id == "CjoKGkNMYXBzZTdudHVVQ0Zjc0IxZ0FkTnFnQjVREhxDSnlBNHV2bnR1VUNGV0dnd2dvZDd3NE5aZy0w"
|
||||
|
||||
def test_arcparam_3(mocker):
|
||||
param = arcparam.getparam("01234567890")
|
||||
assert param == "op2w0wRyGjxDZzhhRFFvTE1ERXlNelExTmpjNE9UQWFFLXFvM2JrQkRRb0xNREV5TXpRMU5qYzRPVEFnQVElM0QlM0QoATAAOABAAEgDUhwIABAAGAAgACoOc3RhdGljY2hlY2tzdW1AAFgDYAFoAHIECAEQAHgA"
|
||||
@@ -1,40 +0,0 @@
|
||||
import pytest
|
||||
from pytchat.tool.mining import parser
|
||||
import pytchat.config as config
|
||||
import requests, json
|
||||
from pytchat.paramgen import arcparam_mining as arcparam
|
||||
|
||||
def test_arcparam_e(mocker):
|
||||
try:
|
||||
arcparam.getparam("01234567890",-1)
|
||||
assert False
|
||||
except ValueError:
|
||||
assert True
|
||||
|
||||
|
||||
|
||||
|
||||
def test_arcparam_0(mocker):
|
||||
param = arcparam.getparam("01234567890",0)
|
||||
|
||||
assert param =="op2w0wQsGiBDZzhhRFFvTE1ERXlNelExTmpjNE9UQWdBUSUzRCUzREABYARyAggBeAE%3D"
|
||||
|
||||
|
||||
def test_arcparam_1(mocker):
|
||||
param = arcparam.getparam("01234567890", seektime = 100000)
|
||||
print(param)
|
||||
assert param == "op2w0wQzGiBDZzhhRFFvTE1ERXlNelExTmpjNE9UQWdBUSUzRCUzREABWgUQgMLXL2AEcgIIAXgB"
|
||||
|
||||
def test_arcparam_2(mocker):
|
||||
param = arcparam.getparam("PZz9NB0-Z64",1)
|
||||
url=f"https://www.youtube.com/live_chat_replay?continuation={param}&playerOffsetMs=1000&pbj=1"
|
||||
resp = requests.Session().get(url,headers = config.headers)
|
||||
jsn = json.loads(resp.text)
|
||||
_ , chatdata = parser.parse(jsn[1])
|
||||
test_id = chatdata[0]["addChatItemAction"]["item"]["liveChatPaidMessageRenderer"]["id"]
|
||||
print(test_id)
|
||||
assert test_id == "ChwKGkNKSGE0YnFJeWVBQ0ZWcUF3Z0VkdGIwRm9R"
|
||||
|
||||
def test_arcparam_3(mocker):
|
||||
param = arcparam.getparam("01234567890")
|
||||
assert param == "op2w0wQsGiBDZzhhRFFvTE1ERXlNelExTmpjNE9UQWdBUSUzRCUzREABYARyAggBeAE%3D"
|
||||
@@ -6,50 +6,55 @@ parse = SuperchatCalculator()._parse
|
||||
|
||||
|
||||
def _open_file(path):
|
||||
with open(path,mode ='r',encoding = 'utf-8') as f:
|
||||
with open(path, mode='r', encoding='utf-8') as f:
|
||||
return f.read()
|
||||
|
||||
|
||||
def load_chatdata(filepath):
|
||||
parser = Parser(is_replay=True)
|
||||
#print(json.loads(_open_file(filepath)))
|
||||
contents = parser.get_contents( json.loads(_open_file(filepath)))
|
||||
# print(json.loads(_open_file(filepath)))
|
||||
contents = parser.get_contents(json.loads(_open_file(filepath)))[0]
|
||||
return parser.parse(contents)[1]
|
||||
|
||||
|
||||
|
||||
def test_parse_1():
|
||||
renderer ={"purchaseAmountText":{"simpleText":"¥2,000"}}
|
||||
symbol ,amount = parse(renderer)
|
||||
renderer = {"purchaseAmountText": {"simpleText": "¥2,000"}}
|
||||
symbol, amount = parse(renderer)
|
||||
assert symbol == '¥'
|
||||
assert amount == 2000.0
|
||||
|
||||
|
||||
def test_parse_2():
|
||||
renderer ={"purchaseAmountText":{"simpleText":"ABC\x0a200"}}
|
||||
symbol ,amount = parse(renderer)
|
||||
renderer = {"purchaseAmountText": {"simpleText": "ABC\x0a200"}}
|
||||
symbol, amount = parse(renderer)
|
||||
assert symbol == 'ABC\x0a'
|
||||
assert amount == 200.0
|
||||
|
||||
|
||||
def test_process_0():
|
||||
"""
|
||||
parse superchat data
|
||||
"""
|
||||
chat_component = {
|
||||
'video_id':'',
|
||||
'timeout':10,
|
||||
'chatdata':load_chatdata(r"tests\testdata\calculator\superchat_0.json")
|
||||
'video_id': '',
|
||||
'timeout': 10,
|
||||
'chatdata': load_chatdata(r"tests/testdata/calculator/superchat_0.json")
|
||||
}
|
||||
assert SuperchatCalculator().process([chat_component])=={'¥': 6800.0, '€': 2.0}
|
||||
assert SuperchatCalculator().process([chat_component]) == {
|
||||
'¥': 6800.0, '€': 2.0}
|
||||
|
||||
|
||||
def test_process_1():
|
||||
"""
|
||||
parse no superchat data
|
||||
"""
|
||||
chat_component = {
|
||||
'video_id':'',
|
||||
'timeout':10,
|
||||
'chatdata':load_chatdata(r"tests\testdata\calculator\text_only.json")
|
||||
'video_id': '',
|
||||
'timeout': 10,
|
||||
'chatdata': load_chatdata(r"tests/testdata/calculator/text_only.json")
|
||||
}
|
||||
assert SuperchatCalculator().process([chat_component])=={}
|
||||
assert SuperchatCalculator().process([chat_component]) == {}
|
||||
|
||||
|
||||
def test_process_2():
|
||||
"""
|
||||
@@ -57,12 +62,11 @@ def test_process_2():
|
||||
"""
|
||||
try:
|
||||
chat_component = {
|
||||
'video_id':'',
|
||||
'timeout':10,
|
||||
'chatdata':load_chatdata(r"tests\testdata\calculator\replay_end.json")
|
||||
'video_id': '',
|
||||
'timeout': 10,
|
||||
'chatdata': load_chatdata(r"tests/testdata/calculator/replay_end.json")
|
||||
}
|
||||
assert False
|
||||
SuperchatCalculator().process([chat_component])
|
||||
except ChatParseException:
|
||||
assert True
|
||||
|
||||
|
||||
@@ -1,37 +1,28 @@
|
||||
import json
|
||||
import pytest
|
||||
import asyncio,aiohttp
|
||||
from pytchat.parser.live import Parser
|
||||
from pytchat.processors.compatible.processor import CompatibleProcessor
|
||||
from pytchat.exceptions import (
|
||||
NoLivechatRendererException,NoYtinitialdataException,
|
||||
ResponseContextError, NoContentsException)
|
||||
|
||||
from pytchat.processors.compatible.renderer.textmessage import LiveChatTextMessageRenderer
|
||||
from pytchat.processors.compatible.renderer.paidmessage import LiveChatPaidMessageRenderer
|
||||
from pytchat.processors.compatible.renderer.paidsticker import LiveChatPaidStickerRenderer
|
||||
from pytchat.processors.compatible.renderer.legacypaid import LiveChatLegacyPaidMessageRenderer
|
||||
|
||||
parser = Parser(is_replay=False)
|
||||
|
||||
|
||||
def test_textmessage(mocker):
|
||||
'''api互換processorのテスト:通常テキストメッセージ'''
|
||||
processor = CompatibleProcessor()
|
||||
|
||||
_json = _open_file("tests/testdata/compatible/textmessage.json")
|
||||
|
||||
_, chatdata = parser.parse(parser.get_contents(json.loads(_json)))
|
||||
_, chatdata = parser.parse(parser.get_contents(json.loads(_json))[0])
|
||||
data = {
|
||||
"video_id" : "",
|
||||
"timeout" : 7,
|
||||
"chatdata" : chatdata
|
||||
"video_id": "",
|
||||
"timeout": 7,
|
||||
"chatdata": chatdata
|
||||
}
|
||||
ret = processor.process([data])
|
||||
|
||||
assert ret["kind"]== "youtube#liveChatMessageListResponse"
|
||||
assert ret["pollingIntervalMillis"]==data["timeout"]*1000
|
||||
assert ret["kind"] == "youtube#liveChatMessageListResponse"
|
||||
assert ret["pollingIntervalMillis"] == data["timeout"] * 1000
|
||||
assert ret.keys() == {
|
||||
"kind", "etag", "pageInfo", "nextPageToken","pollingIntervalMillis","items"
|
||||
"kind", "etag", "pageInfo", "nextPageToken", "pollingIntervalMillis", "items"
|
||||
}
|
||||
assert ret["pageInfo"].keys() == {
|
||||
"totalResults", "resultsPerPage"
|
||||
@@ -40,16 +31,19 @@ def test_textmessage(mocker):
|
||||
"kind", "etag", "id", "snippet", "authorDetails"
|
||||
}
|
||||
assert ret["items"][0]["snippet"].keys() == {
|
||||
'type', 'liveChatId', 'authorChannelId', 'publishedAt', 'hasDisplayContent', 'displayMessage', 'textMessageDetails'
|
||||
'type', 'liveChatId', 'authorChannelId', 'publishedAt', 'hasDisplayContent', 'displayMessage',
|
||||
'textMessageDetails'
|
||||
}
|
||||
assert ret["items"][0]["authorDetails"].keys() == {
|
||||
'channelId', 'channelUrl', 'displayName', 'profileImageUrl', 'isVerified', 'isChatOwner', 'isChatSponsor', 'isChatModerator'
|
||||
'channelId', 'channelUrl', 'displayName', 'profileImageUrl', 'isVerified', 'isChatOwner', 'isChatSponsor',
|
||||
'isChatModerator'
|
||||
}
|
||||
assert ret["items"][0]["snippet"]["textMessageDetails"].keys() == {
|
||||
'messageText'
|
||||
}
|
||||
assert "LCC." in ret["items"][0]["id"]
|
||||
assert ret["items"][0]["snippet"]["type"]=="textMessageEvent"
|
||||
assert ret["items"][0]["snippet"]["type"] == "textMessageEvent"
|
||||
|
||||
|
||||
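Taken together, the assertions above pin down the shape of the API-compatible response. As an illustration, a consumer of that structure might look like the following sketch; it assumes `ret` was produced exactly as in the test, and only keys asserted above are used:

def print_text_messages(ret):
    # ret: the dict returned by CompatibleProcessor().process([...])
    for item in ret["items"]:
        snippet = item["snippet"]
        if snippet["type"] == "textMessageEvent":
            author = item["authorDetails"]["displayName"]
            text = snippet["textMessageDetails"]["messageText"]
            print(f"{author}: {text}")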
def test_newsponcer(mocker):
|
||||
'''Test of the API-compatible processor: new member registration'''
|
||||
@@ -57,34 +51,72 @@ def test_newsponcer(mocker):
|
||||
|
||||
_json = _open_file("tests/testdata/compatible/newSponsor.json")
|
||||
|
||||
_, chatdata = parser.parse(parser.get_contents(json.loads(_json)))
|
||||
_, chatdata = parser.parse(parser.get_contents(json.loads(_json))[0])
|
||||
data = {
|
||||
"video_id" : "",
|
||||
"timeout" : 7,
|
||||
"chatdata" : chatdata
|
||||
"video_id": "",
|
||||
"timeout": 7,
|
||||
"chatdata": chatdata
|
||||
}
|
||||
ret = processor.process([data])
|
||||
|
||||
assert ret["kind"]== "youtube#liveChatMessageListResponse"
|
||||
assert ret["pollingIntervalMillis"]==data["timeout"]*1000
|
||||
assert ret["kind"] == "youtube#liveChatMessageListResponse"
|
||||
assert ret["pollingIntervalMillis"] == data["timeout"] * 1000
|
||||
assert ret.keys() == {
|
||||
"kind", "etag", "pageInfo", "nextPageToken","pollingIntervalMillis","items"
|
||||
"kind", "etag", "pageInfo", "nextPageToken", "pollingIntervalMillis", "items"
|
||||
}
|
||||
assert ret["pageInfo"].keys() == {
|
||||
"totalResults", "resultsPerPage"
|
||||
}
|
||||
assert ret["items"][0].keys() == {
|
||||
"kind", "etag", "id", "snippet","authorDetails"
|
||||
"kind", "etag", "id", "snippet", "authorDetails"
|
||||
}
|
||||
assert ret["items"][0]["snippet"].keys() == {
|
||||
'type', 'liveChatId', 'authorChannelId', 'publishedAt', 'hasDisplayContent', 'displayMessage'
|
||||
|
||||
}
|
||||
assert ret["items"][0]["authorDetails"].keys() == {
|
||||
'channelId', 'channelUrl', 'displayName', 'profileImageUrl', 'isVerified', 'isChatOwner', 'isChatSponsor', 'isChatModerator'
|
||||
'channelId', 'channelUrl', 'displayName', 'profileImageUrl', 'isVerified', 'isChatOwner', 'isChatSponsor',
|
||||
'isChatModerator'
|
||||
}
|
||||
assert "LCC." in ret["items"][0]["id"]
|
||||
assert ret["items"][0]["snippet"]["type"]=="newSponsorEvent"
|
||||
assert ret["items"][0]["snippet"]["type"] == "newSponsorEvent"
|
||||
|
||||
|
||||
def test_newsponcer_rev(mocker):
|
||||
'''Test of the API-compatible processor: new member registration'''
|
||||
processor = CompatibleProcessor()
|
||||
|
||||
_json = _open_file("tests/testdata/compatible/newSponsor_rev.json")
|
||||
|
||||
_, chatdata = parser.parse(parser.get_contents(json.loads(_json))[0])
|
||||
data = {
|
||||
"video_id": "",
|
||||
"timeout": 7,
|
||||
"chatdata": chatdata
|
||||
}
|
||||
ret = processor.process([data])
|
||||
|
||||
assert ret["kind"] == "youtube#liveChatMessageListResponse"
|
||||
assert ret["pollingIntervalMillis"] == data["timeout"] * 1000
|
||||
assert ret.keys() == {
|
||||
"kind", "etag", "pageInfo", "nextPageToken", "pollingIntervalMillis", "items"
|
||||
}
|
||||
assert ret["pageInfo"].keys() == {
|
||||
"totalResults", "resultsPerPage"
|
||||
}
|
||||
assert ret["items"][0].keys() == {
|
||||
"kind", "etag", "id", "snippet", "authorDetails"
|
||||
}
|
||||
assert ret["items"][0]["snippet"].keys() == {
|
||||
'type', 'liveChatId', 'authorChannelId', 'publishedAt', 'hasDisplayContent', 'displayMessage'
|
||||
|
||||
}
|
||||
assert ret["items"][0]["authorDetails"].keys() == {
|
||||
'channelId', 'channelUrl', 'displayName', 'profileImageUrl', 'isVerified', 'isChatOwner', 'isChatSponsor',
|
||||
'isChatModerator'
|
||||
}
|
||||
assert "LCC." in ret["items"][0]["id"]
|
||||
assert ret["items"][0]["snippet"]["type"] == "newSponsorEvent"
|
||||
|
||||
|
||||
def test_superchat(mocker):
|
||||
@@ -93,18 +125,18 @@ def test_superchat(mocker):
|
||||
|
||||
_json = _open_file("tests/testdata/compatible/superchat.json")
|
||||
|
||||
_, chatdata = parser.parse(parser.get_contents(json.loads(_json)))
|
||||
_, chatdata = parser.parse(parser.get_contents(json.loads(_json))[0])
|
||||
data = {
|
||||
"video_id" : "",
|
||||
"timeout" : 7,
|
||||
"chatdata" : chatdata
|
||||
"video_id": "",
|
||||
"timeout": 7,
|
||||
"chatdata": chatdata
|
||||
}
|
||||
ret = processor.process([data])
|
||||
|
||||
assert ret["kind"]== "youtube#liveChatMessageListResponse"
|
||||
assert ret["pollingIntervalMillis"]==data["timeout"]*1000
|
||||
assert ret["kind"] == "youtube#liveChatMessageListResponse"
|
||||
assert ret["pollingIntervalMillis"] == data["timeout"] * 1000
|
||||
assert ret.keys() == {
|
||||
"kind", "etag", "pageInfo", "nextPageToken","pollingIntervalMillis","items"
|
||||
"kind", "etag", "pageInfo", "nextPageToken", "pollingIntervalMillis", "items"
|
||||
}
|
||||
assert ret["pageInfo"].keys() == {
|
||||
"totalResults", "resultsPerPage"
|
||||
@@ -113,33 +145,36 @@ def test_superchat(mocker):
|
||||
"kind", "etag", "id", "snippet", "authorDetails"
|
||||
}
|
||||
assert ret["items"][0]["snippet"].keys() == {
|
||||
'type', 'liveChatId', 'authorChannelId', 'publishedAt', 'hasDisplayContent', 'displayMessage', 'superChatDetails'
|
||||
'type', 'liveChatId', 'authorChannelId', 'publishedAt', 'hasDisplayContent', 'displayMessage',
|
||||
'superChatDetails'
|
||||
}
|
||||
assert ret["items"][0]["authorDetails"].keys() == {
|
||||
'channelId', 'channelUrl', 'displayName', 'profileImageUrl', 'isVerified', 'isChatOwner', 'isChatSponsor', 'isChatModerator'
|
||||
'channelId', 'channelUrl', 'displayName', 'profileImageUrl', 'isVerified', 'isChatOwner', 'isChatSponsor',
|
||||
'isChatModerator'
|
||||
}
|
||||
assert ret["items"][0]["snippet"]["superChatDetails"].keys() == {
|
||||
'amountMicros', 'currency', 'amountDisplayString', 'tier', 'backgroundColor'
|
||||
}
|
||||
assert "LCC." in ret["items"][0]["id"]
|
||||
assert ret["items"][0]["snippet"]["type"]=="superChatEvent"
|
||||
assert ret["items"][0]["snippet"]["type"] == "superChatEvent"
|
||||
|
||||
|
||||
def test_unregistered_currency(mocker):
|
||||
processor = CompatibleProcessor()
|
||||
|
||||
_json = _open_file("tests/testdata/unregistered_currency.json")
|
||||
|
||||
_, chatdata = parser.parse(parser.get_contents(json.loads(_json)))
|
||||
_, chatdata = parser.parse(parser.get_contents(json.loads(_json))[0])
|
||||
|
||||
data = {
|
||||
"video_id" : "",
|
||||
"timeout" : 7,
|
||||
"chatdata" : chatdata
|
||||
"video_id": "",
|
||||
"timeout": 7,
|
||||
"chatdata": chatdata
|
||||
}
|
||||
ret = processor.process([data])
|
||||
assert ret["items"][0]["snippet"]["superChatDetails"]["currency"] == "[UNREGISTERD]"
|
||||
|
||||
|
||||
def _open_file(path):
|
||||
with open(path,mode ='r',encoding = 'utf-8') as f:
|
||||
with open(path, mode='r', encoding='utf-8') as f:
|
||||
return f.read()
|
||||
|
||||
227
tests/test_default_processor.py
Normal file
@@ -0,0 +1,227 @@
|
||||
import json
|
||||
from datetime import datetime
|
||||
from pytchat.parser.live import Parser
|
||||
from pytchat.processors.default.processor import DefaultProcessor
|
||||
|
||||
|
||||
TEST_TIMETSTAMP = 1570678496000000
|
||||
|
||||
|
||||
def get_local_datetime(timestamp):
|
||||
dt = datetime.fromtimestamp(timestamp / 1000000)
|
||||
return dt.strftime('%Y-%m-%d %H:%M:%S')
|
||||
|
||||
|
||||
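TEST_TIMETSTAMP is a Unix timestamp in microseconds; get_local_datetime divides it by 1,000,000 and formats it in local time, so the expected string depends on the timezone of the machine running the tests. A quick check (the JST value is given only as an example):

from datetime import datetime, timezone

ts_usec = 1570678496000000                        # TEST_TIMETSTAMP
utc = datetime.fromtimestamp(ts_usec / 1000000, tz=timezone.utc)
print(utc.strftime('%Y-%m-%d %H:%M:%S'))          # 2019-10-10 03:34:56 (UTC)
# datetime.fromtimestamp() without tz, as used above, yields e.g.
# '2019-10-10 12:34:56' when run under JST (UTC+9).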
def test_textmessage(mocker):
|
||||
'''text message'''
|
||||
processor = DefaultProcessor()
|
||||
parser = Parser(is_replay=False)
|
||||
_json = _open_file("tests/testdata/default/textmessage.json")
|
||||
|
||||
_, chatdata = parser.parse(parser.get_contents(json.loads(_json))[0])
|
||||
data = {
|
||||
"video_id": "",
|
||||
"timeout": 7,
|
||||
"chatdata": chatdata
|
||||
}
|
||||
|
||||
ret = processor.process([data]).items[0]
|
||||
assert ret.id == "dummy_id"
|
||||
assert ret.message == "dummy_message"
|
||||
assert ret.timestamp == 1570678496000
|
||||
assert ret.datetime == get_local_datetime(TEST_TIMETSTAMP)
|
||||
assert ret.author.name == "author_name"
|
||||
assert ret.author.channelId == "author_channel_id"
|
||||
assert ret.author.channelUrl == "http://www.youtube.com/channel/author_channel_id"
|
||||
assert ret.author.imageUrl == "https://yt3.ggpht.com/------------/AAAAAAAAAAA/AAAAAAAAAAA/xxxxxxxxxxxx/s64-x-x-xx-xx-xx-c0xffffff/photo.jpg"
|
||||
assert ret.author.badgeUrl == ""
|
||||
assert ret.author.isVerified is False
|
||||
assert ret.author.isChatOwner is False
|
||||
assert ret.author.isChatSponsor is False
|
||||
assert ret.author.isChatModerator is False
|
||||
|
||||
|
||||
def test_textmessage_replay_member(mocker):
|
||||
'''text message replay member'''
|
||||
processor = DefaultProcessor()
|
||||
parser = Parser(is_replay=True)
|
||||
_json = _open_file("tests/testdata/default/replay_member_text.json")
|
||||
|
||||
_, chatdata = parser.parse(parser.get_contents(json.loads(_json))[0])
|
||||
data = {
|
||||
"video_id": "",
|
||||
"timeout": 7,
|
||||
"chatdata": chatdata
|
||||
}
|
||||
|
||||
ret = processor.process([data]).items[0]
|
||||
assert ret.type == "textMessage"
|
||||
assert ret.id == "dummy_id"
|
||||
assert ret.message == "dummy_message"
|
||||
assert ret.messageEx == ["dummy_message"]
|
||||
assert ret.timestamp == 1570678496000
|
||||
assert ret.datetime == get_local_datetime(TEST_TIMETSTAMP)
|
||||
assert ret.elapsedTime == "1:23:45"
|
||||
assert ret.author.name == "author_name"
|
||||
assert ret.author.channelId == "author_channel_id"
|
||||
assert ret.author.channelUrl == "http://www.youtube.com/channel/author_channel_id"
|
||||
assert ret.author.imageUrl == "https://yt3.ggpht.com/------------/AAAAAAAAAAA/AAAAAAAAAAA/xxxxxxxxxxxx/s64-x-x-xx-xx-xx-c0xffffff/photo.jpg"
|
||||
assert ret.author.badgeUrl == "https://yt3.ggpht.com/X=s16-c-k"
|
||||
assert ret.author.isVerified is False
|
||||
assert ret.author.isChatOwner is False
|
||||
assert ret.author.isChatSponsor is True
|
||||
assert ret.author.isChatModerator is False
|
||||
|
||||
|
||||
def test_superchat(mocker):
|
||||
'''superchat'''
|
||||
processor = DefaultProcessor()
|
||||
parser = Parser(is_replay=False)
|
||||
_json = _open_file("tests/testdata/default/superchat.json")
|
||||
|
||||
_, chatdata = parser.parse(parser.get_contents(json.loads(_json))[0])
|
||||
data = {
|
||||
"video_id": "",
|
||||
"timeout": 7,
|
||||
"chatdata": chatdata
|
||||
}
|
||||
|
||||
ret = processor.process([data]).items[0]
|
||||
assert ret.type == "superChat"
|
||||
assert ret.id == "dummy_id"
|
||||
assert ret.message == "dummy_message"
|
||||
assert ret.messageEx == ["dummy_message"]
|
||||
assert ret.timestamp == 1570678496000
|
||||
assert ret.datetime == get_local_datetime(TEST_TIMETSTAMP)
|
||||
assert ret.elapsedTime == ""
|
||||
assert ret.amountValue == 800
|
||||
assert ret.amountString == "¥800"
|
||||
assert ret.currency == "JPY"
|
||||
assert ret.bgColor == 4280150454
|
||||
assert ret.author.name == "author_name"
|
||||
assert ret.author.channelId == "author_channel_id"
|
||||
assert ret.author.channelUrl == "http://www.youtube.com/channel/author_channel_id"
|
||||
assert ret.author.imageUrl == "https://yt3.ggpht.com/------------/AAAAAAAAAAA/AAAAAAAAAAA/xxxxxxxxxxxx/s64-x-x-xx-xx-xx-c0xffffff/photo.jpg"
|
||||
assert ret.author.badgeUrl == ""
|
||||
assert ret.author.isVerified is False
|
||||
assert ret.author.isChatOwner is False
|
||||
assert ret.author.isChatSponsor is False
|
||||
assert ret.author.isChatModerator is False
|
||||
assert ret.colors.headerBackgroundColor == 4278239141
|
||||
assert ret.colors.headerTextColor == 4278190080
|
||||
assert ret.colors.bodyBackgroundColor == 4280150454
|
||||
assert ret.colors.bodyTextColor == 4278190080
|
||||
assert ret.colors.authorNameTextColor == 2315255808
|
||||
assert ret.colors.timestampColor == 2147483648
|
||||
|
||||
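The color values asserted above are 32-bit ARGB integers; reading them in hexadecimal makes the encoding apparent:

# 32-bit ARGB: 0xAARRGGBB packed into an unsigned int
assert 4280150454 == 0xFF1DE9B6   # bodyBackgroundColor: fully opaque teal
assert 4278190080 == 0xFF000000   # headerTextColor / bodyTextColor: opaque black
assert 2147483648 == 0x80000000   # timestampColor: black at ~50% alpha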
|
||||
def test_supersticker(mocker):
|
||||
'''supersticker'''
|
||||
processor = DefaultProcessor()
|
||||
parser = Parser(is_replay=False)
|
||||
_json = _open_file("tests/testdata/default/supersticker.json")
|
||||
|
||||
_, chatdata = parser.parse(parser.get_contents(json.loads(_json))[0])
|
||||
data = {
|
||||
"video_id": "",
|
||||
"timeout": 7,
|
||||
"chatdata": chatdata
|
||||
}
|
||||
|
||||
ret = processor.process([data]).items[0]
|
||||
assert ret.type == "superSticker"
|
||||
assert ret.id == "dummy_id"
|
||||
assert ret.message == ""
|
||||
assert ret.messageEx == []
|
||||
assert ret.timestamp == 1570678496000
|
||||
assert ret.datetime == get_local_datetime(TEST_TIMETSTAMP)
|
||||
assert ret.elapsedTime == ""
|
||||
assert ret.amountValue == 200
|
||||
assert ret.amountString == "¥200"
|
||||
assert ret.currency == "JPY"
|
||||
assert ret.bgColor == 4278237396
|
||||
assert ret.sticker == "https://lh3.googleusercontent.com/param_s=s72-rp"
|
||||
assert ret.author.name == "author_name"
|
||||
assert ret.author.channelId == "author_channel_id"
|
||||
assert ret.author.channelUrl == "http://www.youtube.com/channel/author_channel_id"
|
||||
assert ret.author.imageUrl == "https://yt3.ggpht.com/------------/AAAAAAAAAAA/AAAAAAAAAAA/xxxxxxxxxxxx/s64-x-x-xx-xx-xx-c0xffffff/photo.jpg"
|
||||
assert ret.author.badgeUrl == ""
|
||||
assert ret.author.isVerified is False
|
||||
assert ret.author.isChatOwner is False
|
||||
assert ret.author.isChatSponsor is False
|
||||
assert ret.author.isChatModerator is False
|
||||
assert ret.colors.backgroundColor == 4278237396
|
||||
assert ret.colors.moneyChipBackgroundColor == 4278248959
|
||||
assert ret.colors.moneyChipTextColor == 4278190080
|
||||
assert ret.colors.authorNameTextColor == 3003121664
|
||||
|
||||
|
||||
def test_sponsor(mocker):
|
||||
'''sponsor(membership)'''
|
||||
processor = DefaultProcessor()
|
||||
parser = Parser(is_replay=False)
|
||||
_json = _open_file("tests/testdata/default/newSponsor_current.json")
|
||||
|
||||
_, chatdata = parser.parse(parser.get_contents(json.loads(_json))[0])
|
||||
data = {
|
||||
"video_id": "",
|
||||
"timeout": 7,
|
||||
"chatdata": chatdata
|
||||
}
|
||||
|
||||
ret = processor.process([data]).items[0]
|
||||
assert ret.type == "newSponsor"
|
||||
assert ret.id == "dummy_id"
|
||||
assert ret.message == "新規メンバー"
|
||||
assert ret.messageEx == ["新規メンバー"]
|
||||
assert ret.timestamp == 1570678496000
|
||||
assert ret.datetime == get_local_datetime(TEST_TIMETSTAMP)
|
||||
assert ret.elapsedTime == ""
|
||||
assert ret.bgColor == 0
|
||||
assert ret.author.name == "author_name"
|
||||
assert ret.author.channelId == "author_channel_id"
|
||||
assert ret.author.channelUrl == "http://www.youtube.com/channel/author_channel_id"
|
||||
assert ret.author.imageUrl == "https://yt3.ggpht.com/------------/AAAAAAAAAAA/AAAAAAAAAAA/xxxxxxxxxxxx/s64-x-x-xx-xx-xx-c0xffffff/photo.jpg"
|
||||
assert ret.author.badgeUrl == "https://yt3.ggpht.com/X=s32-c-k"
|
||||
assert ret.author.isVerified is False
|
||||
assert ret.author.isChatOwner is False
|
||||
assert ret.author.isChatSponsor is True
|
||||
assert ret.author.isChatModerator is False
|
||||
|
||||
|
||||
def test_sponsor_legacy(mocker):
|
||||
'''legacy sponsor (membership)'''
|
||||
processor = DefaultProcessor()
|
||||
parser = Parser(is_replay=False)
|
||||
_json = _open_file("tests/testdata/default/newSponsor_lagacy.json")
|
||||
|
||||
_, chatdata = parser.parse(parser.get_contents(json.loads(_json))[0])
|
||||
data = {
|
||||
"video_id": "",
|
||||
"timeout": 7,
|
||||
"chatdata": chatdata
|
||||
}
|
||||
|
||||
ret = processor.process([data]).items[0]
|
||||
assert ret.type == "newSponsor"
|
||||
assert ret.id == "dummy_id"
|
||||
assert ret.message == "新規メンバー / ようこそ、author_name!"
|
||||
assert ret.messageEx == ["新規メンバー / ようこそ、author_name!"]
|
||||
assert ret.timestamp == 1570678496000
|
||||
assert ret.datetime == get_local_datetime(TEST_TIMETSTAMP)
|
||||
assert ret.elapsedTime == ""
|
||||
assert ret.bgColor == 0
|
||||
assert ret.author.name == "author_name"
|
||||
assert ret.author.channelId == "author_channel_id"
|
||||
assert ret.author.channelUrl == "http://www.youtube.com/channel/author_channel_id"
|
||||
assert ret.author.imageUrl == "https://yt3.ggpht.com/------------/AAAAAAAAAAA/AAAAAAAAAAA/xxxxxxxxxxxx/s64-x-x-xx-xx-xx-c0xffffff/photo.jpg"
|
||||
assert ret.author.badgeUrl == ""
|
||||
assert ret.author.isVerified is False
|
||||
assert ret.author.isChatOwner is False
|
||||
assert ret.author.isChatSponsor is True
|
||||
assert ret.author.isChatModerator is False
|
||||
|
||||
|
||||
def _open_file(path):
|
||||
with open(path, mode='r', encoding='utf-8') as f:
|
||||
return f.read()
|
||||
@@ -1,77 +0,0 @@
|
||||
import aiohttp
|
||||
import asyncio
|
||||
import json
|
||||
from pytchat.tool.extract import parser
|
||||
import sys
|
||||
import time
|
||||
from aioresponses import aioresponses
|
||||
from concurrent.futures import CancelledError
|
||||
from pytchat.tool.extract import asyncdl
|
||||
|
||||
def _open_file(path):
|
||||
with open(path,mode ='r',encoding = 'utf-8') as f:
|
||||
return f.read()
|
||||
|
||||
|
||||
def test_asyncdl_split():
|
||||
|
||||
ret = asyncdl._split(0,1000,1)
|
||||
assert ret == [0]
|
||||
|
||||
ret = asyncdl._split(1000,1000,10)
|
||||
assert ret == [1000]
|
||||
|
||||
ret = asyncdl._split(0,1000,5)
|
||||
assert ret == [0,200,400,600,800]
|
||||
|
||||
ret = asyncdl._split(10.5, 700.3, 5)
|
||||
assert ret == [10, 148, 286, 424, 562]
|
||||
|
||||
|
||||
ret = asyncdl._split(0,500,5)
|
||||
assert ret == [0,125,250,375]
|
||||
|
||||
ret = asyncdl._split(0,500,500)
|
||||
assert ret == [0,125,250,375]
|
||||
|
||||
ret = asyncdl._split(-1,1000,5)
|
||||
assert ret == [-1, 199, 399, 599, 799]
|
||||
|
||||
"""invalid argument order"""
|
||||
try:
|
||||
ret = asyncdl._split(500,0,5)
|
||||
assert False
|
||||
except ValueError:
|
||||
assert True
|
||||
|
||||
"""invalid count"""
|
||||
try:
|
||||
ret = asyncdl._split(0,500,-1)
|
||||
assert False
|
||||
except ValueError:
|
||||
assert True
|
||||
|
||||
try:
|
||||
ret = asyncdl._split(0,500,0)
|
||||
assert False
|
||||
except ValueError:
|
||||
assert True
|
||||
|
||||
"""invalid argument type"""
|
||||
try:
|
||||
ret = asyncdl._split(0,5000,5.2)
|
||||
assert False
|
||||
except ValueError:
|
||||
assert True
|
||||
|
||||
try:
|
||||
ret = asyncdl._split(0,5000,"test")
|
||||
assert False
|
||||
except ValueError:
|
||||
assert True
|
||||
|
||||
try:
|
||||
ret = asyncdl._split([0,1],5000,5)
|
||||
assert False
|
||||
except ValueError:
|
||||
assert True
|
||||
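The assertions above characterize the splitter's behavior: the range is divided into roughly equal start offsets, the block count shrinks when blocks would become too short, float bounds are truncated to ints, and bad arguments raise ValueError. A hypothetical sketch consistent with these cases follows; the 120-unit minimum interval is an assumption inferred from the 0..500 cases, not taken from asyncdl's source:

def split_range(start, end, count, min_interval=120):
    # Hypothetical splitter mirroring the assertions above; NOT asyncdl._split itself.
    if not isinstance(count, int):
        raise ValueError("count must be an int")
    if not isinstance(start, (int, float)) or not isinstance(end, (int, float)):
        raise ValueError("start and end must be numbers")
    if count <= 0 or end < start:
        raise ValueError("invalid range or count")
    start, end = int(start), int(end)
    if end == start:
        return [start]
    if (end - start) / count < min_interval:           # avoid overly short blocks
        count = max(int((end - start) / min_interval), 1)
    interval = int((end - start) / count)
    return [start + interval * i for i in range(count)]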
@@ -1,128 +0,0 @@
|
||||
import aiohttp
|
||||
import asyncio
|
||||
import json
|
||||
import os, sys
|
||||
import time
|
||||
from pytchat.tool.extract import duplcheck
|
||||
from pytchat.tool.extract import parser
|
||||
from pytchat.tool.extract.block import Block
|
||||
from pytchat.tool.extract.duplcheck import _dump
|
||||
def _open_file(path):
|
||||
with open(path,mode ='r',encoding = 'utf-8') as f:
|
||||
return f.read()
|
||||
|
||||
|
||||
|
||||
def test_overlap():
|
||||
"""
|
||||
test overlap data
|
||||
operation : [0] [2] [3] [4] -> align last to end
            [1] , [5]       -> no change
|
||||
|
||||
"""
|
||||
|
||||
def load_chatdata(filename):
|
||||
return parser.parse(
|
||||
json.loads(_open_file("tests/testdata/extract_duplcheck/overlap/"+filename))
|
||||
)[1]
|
||||
|
||||
blocks = (
|
||||
Block(first = 0, last= 12771, end= 9890,chat_data = load_chatdata("dp0-0.json")),
|
||||
Block(first = 9890, last= 15800, end= 20244,chat_data = load_chatdata("dp0-1.json")),
|
||||
Block(first = 20244,last= 45146, end= 32476,chat_data = load_chatdata("dp0-2.json")),
|
||||
Block(first = 32476,last= 50520, end= 41380,chat_data = load_chatdata("dp0-3.json")),
|
||||
Block(first = 41380,last= 62875, end= 52568,chat_data = load_chatdata("dp0-4.json")),
|
||||
Block(first = 52568,last= 62875, end= 54000,chat_data = load_chatdata("dp0-5.json"),is_last=True)
|
||||
)
|
||||
result = duplcheck.remove_overlap(blocks)
|
||||
# dp0-0.json has an item whose offset time is 9890 (equal to block[0].end = block[1].first),
# but last must be aligned to the closest smaller offset: 9779.
|
||||
assert result[0].last == 9779
|
||||
|
||||
assert result[1].last == 15800
|
||||
|
||||
assert result[2].last == 32196
|
||||
|
||||
assert result[3].last == 41116
|
||||
|
||||
assert result[4].last == 52384
|
||||
|
||||
# the last block must always be added to the result.
|
||||
assert result[5].last == 62875
|
||||
|
||||
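A minimal sketch of the alignment performed here (hypothetical helper; the offset lookup and the real remove_overlap in pytchat.tool.extract.duplcheck are more involved):

def trim_block_to(block, next_first, offset_of):
    # offset_of: placeholder for "read a chat item's offset time"
    # drop chat items at or beyond the next block's first offset ...
    block.chat_data = [c for c in block.chat_data if offset_of(c) < next_first]
    # ... and align block.last to the last remaining offset (9779 in the case above)
    if block.chat_data:
        block.last = offset_of(block.chat_data[-1])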
def test_duplicate_head():
|
||||
|
||||
def load_chatdata(filename):
|
||||
return parser.parse(
|
||||
json.loads(_open_file("tests/testdata/extract_duplcheck/head/"+filename))
|
||||
)[1]
|
||||
|
||||
"""
|
||||
test duplicate head data
|
||||
operation : [0] , [1] -> discard [0]
|
||||
[1] , [2] -> discard [1]
|
||||
[2] , [3] -> append [2]
|
||||
[3] , [4] -> discard [3]
|
||||
[4] , [5] -> append [4]
|
||||
append [5]
|
||||
|
||||
result : [2] , [4] , [5]
|
||||
"""
|
||||
|
||||
#chat data offsets are ignored.
|
||||
blocks = (
|
||||
Block(first = 0, last = 2500, chat_data = load_chatdata("dp0-0.json")),
|
||||
Block(first = 0, last =38771, chat_data = load_chatdata("dp0-1.json")),
|
||||
Block(first = 0, last =45146, chat_data = load_chatdata("dp0-2.json")),
|
||||
Block(first = 20244, last =60520, chat_data = load_chatdata("dp0-3.json")),
|
||||
Block(first = 20244, last =62875, chat_data = load_chatdata("dp0-4.json")),
|
||||
Block(first = 52568, last =62875, chat_data = load_chatdata("dp0-5.json"))
|
||||
)
|
||||
_dump(blocks)
|
||||
result = duplcheck.remove_duplicate_head(blocks)
|
||||
|
||||
assert len(result) == 3
|
||||
assert result[0].first == blocks[2].first
|
||||
assert result[0].last == blocks[2].last
|
||||
assert result[1].first == blocks[4].first
|
||||
assert result[1].last == blocks[4].last
|
||||
assert result[2].first == blocks[5].first
|
||||
assert result[2].last == blocks[5].last
|
||||
|
||||
def test_duplicate_tail():
|
||||
"""
|
||||
test duplicate tail data
|
||||
operation : append [0]
|
||||
[0] , [1] -> discard [1]
|
||||
[1] , [2] -> append [2]
|
||||
[2] , [3] -> discard [3]
|
||||
[3] , [4] -> append [4]
|
||||
[4] , [5] -> discard [5]
|
||||
|
||||
result : [0] , [2] , [4]
|
||||
"""
|
||||
def load_chatdata(filename):
|
||||
return parser.parse(
|
||||
json.loads(_open_file("tests/testdata/extract_duplcheck/head/"+filename))
|
||||
)[1]
|
||||
#chat data offsets are ignored.
|
||||
blocks = (
|
||||
Block(first = 0,last = 2500, chat_data=load_chatdata("dp0-0.json")),
|
||||
Block(first = 1500,last = 2500, chat_data=load_chatdata("dp0-1.json")),
|
||||
Block(first = 10000,last = 45146, chat_data=load_chatdata("dp0-2.json")),
|
||||
Block(first = 20244,last = 45146, chat_data=load_chatdata("dp0-3.json")),
|
||||
Block(first = 20244,last = 62875, chat_data=load_chatdata("dp0-4.json")),
|
||||
Block(first = 52568,last = 62875, chat_data=load_chatdata("dp0-5.json"))
|
||||
)
|
||||
|
||||
result = duplcheck.remove_duplicate_tail(blocks)
|
||||
_dump(result)
|
||||
assert len(result) == 3
|
||||
assert result[0].first == blocks[0].first
|
||||
assert result[0].last == blocks[0].last
|
||||
assert result[1].first == blocks[2].first
|
||||
assert result[1].last == blocks[2].last
|
||||
assert result[2].first == blocks[4].first
|
||||
assert result[2].last == blocks[4].last
|
||||
|
||||
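Both tests above exercise the same idea from opposite ends: a block is dropped when its neighbour already covers its first (or last) chat item, and the final block is always kept. A hypothetical sketch of the head-side rule, where same_head is a placeholder predicate (e.g. comparing the first chat item's id):

def dedup_head(blocks, same_head):
    # keep a block only if the next block does NOT start with the same head item
    kept = [b for b, nxt in zip(blocks, blocks[1:]) if not same_head(b, nxt)]
    kept.append(blocks[-1])     # the last block is always appended
    return kept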
|
||||
@@ -1,238 +0,0 @@
|
||||
import aiohttp
|
||||
import asyncio
|
||||
import json
|
||||
import os, sys
|
||||
import time
|
||||
from aioresponses import aioresponses
|
||||
from pytchat.tool.extract import duplcheck
|
||||
from pytchat.tool.extract import parser
|
||||
from pytchat.tool.extract.block import Block
|
||||
from pytchat.tool.extract.patch import Patch, fill, split, set_patch
|
||||
from pytchat.tool.extract.duplcheck import _dump
|
||||
|
||||
def _open_file(path):
|
||||
with open(path,mode ='r',encoding = 'utf-8') as f:
|
||||
return f.read()
|
||||
|
||||
def load_chatdata(filename):
|
||||
return parser.parse(
|
||||
json.loads(_open_file("tests/testdata/fetch_patch/"+filename))
|
||||
)[1]
|
||||
|
||||
|
||||
def test_split_0():
|
||||
"""
|
||||
Normal case
|
||||
|
||||
~~~~~~ before ~~~~~~
|
||||
|
||||
@parent_block (# = already fetched)
|
||||
|
||||
first last end
|
||||
|########----------------------------------------|
|
||||
|
||||
|
||||
@child_block
|
||||
|
||||
first = last = 0 end (=parent_end)
|
||||
| |
|
||||
|
||||
|
||||
@fetched patch
|
||||
|-- patch --|
|
||||
|
||||
|
||||
|
|
||||
|
|
||||
V
|
||||
|
||||
~~~~~~ after ~~~~~~
|
||||
|
||||
|
||||
@parent_block
|
||||
|
||||
first last end (after split)
|
||||
|########------------|
|
||||
|
||||
@child_block
|
||||
first last end
|
||||
|###########---------------|
|
||||
|
||||
@fetched patch
|
||||
|-- patch --|
|
||||
"""
|
||||
parent = Block(first=0, last=4000, end=60000, continuation='parent', during_split=True)
|
||||
child = Block(first=0, last=0, end=60000, continuation='mean', during_split=True)
|
||||
patch = Patch(chats=load_chatdata('pt0-5.json'),
|
||||
first=32500, last=34000, continuation='patch')
|
||||
|
||||
split(parent,child,patch)
|
||||
|
||||
assert child.continuation == 'patch'
|
||||
assert parent.last < child.first
|
||||
assert parent.end == child.first
|
||||
assert child.first < child.last
|
||||
assert child.last < child.end
|
||||
assert parent.during_split == False
|
||||
assert child.during_split == False
|
||||
|
||||
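This test and the three that follow pin down the contract of split(): in the normal case the parent is shortened to end where the child begins and the child takes over the patch; if the parent has already fetched past the patch, the patch is discarded; if the patch runs past the child's end, the excess is cut and the child is finished. A heavily simplified sketch of that contract (the chat-data trimming is only indicated in a comment, not implemented as in pytchat.tool.extract.patch):

def split_sketch(parent, child, patch):
    if patch.first <= parent.last:
        # parent already covers the patch: discard it and leave the child
        # flagged during_split so other workers skip it
        child.continuation = None
        parent.during_split = False
        return
    child.first = patch.first
    parent.end = child.first                 # parent now ends where the child begins
    child.last = patch.last
    child.continuation = patch.continuation
    if child.last >= child.end:
        # patch ran past the child's end: in the real code the patch chats are
        # trimmed and child.last becomes the last kept offset (below child.end);
        # the child is then complete, so no continuation remains
        child.continuation = None
    parent.during_split = False
    child.during_split = False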
def test_split_1():
|
||||
"""patch.first <= parent_block.last
|
||||
|
||||
While awaiting in run() -> asyncdl._fetch(),
fetching of parent_block proceeds
and parent_block.last comes to exceed patch.first.

In this case the fetched patch is discarded entirely,
and the worker searches for another block to process.
|
||||
|
||||
~~~~~~ before ~~~~~~
|
||||
|
||||
patch.first
|
||||
first | last end
|
||||
|####################|#####|---------------------|
|
||||
^
|
||||
@child_block
|
||||
first = last = 0 end (=parent_end)
|
||||
| |
|
||||
|
||||
@fetched patch
|
||||
|-- patch --|
|
||||
|
||||
|
||||
|
|
||||
|
|
||||
V
|
||||
|
||||
~~~~~~ after ~~~~~~
|
||||
|
||||
@parent_block
|
||||
first last end
|
||||
|###########################|--------------------|
|
||||
|
||||
@child_block
|
||||
|
||||
.............. -> discard all data
|
||||
|
||||
"""
|
||||
parent = Block(first=0, last=33000, end=60000, continuation='parent', during_split=True)
|
||||
child = Block(first=0, last=0, end=60000, continuation='mean', during_split=True)
|
||||
patch = Patch(chats=load_chatdata('pt0-5.json'),
|
||||
first=32500, last=34000, continuation='patch')
|
||||
|
||||
split(parent,child,patch)
|
||||
|
||||
assert parent.last == 33000 #no change
|
||||
assert parent.end == 60000 #no change
|
||||
assert child.continuation is None
|
||||
assert parent.during_split == False
|
||||
assert child.during_split == True  # stays flagged during_split, so this block is excluded from further processing
|
||||
|
||||
def test_split_2():
|
||||
"""child_block.end < patch.last:
|
||||
|
||||
Case where the last offset of the patch exceeds child_block.end.
In this case, the overlapping data is removed from the patch.
|
||||
|
||||
~~~~~~ before ~~~~~~
|
||||
|
||||
@parent_block (# = already fetched)
|
||||
first last end (before split)
|
||||
|########------------------------------|
|
||||
|
||||
@child_block
|
||||
first = last = 0 end (=parent_end)
|
||||
| |
|
||||
|
||||
continuation:succeed from patch
|
||||
|
||||
@fetched patch
|
||||
|-------- patch --------|
|
||||
|
||||
|
||||
|
|
||||
|
|
||||
V
|
||||
|
||||
~~~~~~ after ~~~~~~
|
||||
|
||||
@parent_block
|
||||
first last end (after split)
|
||||
|########------------|
|
||||
|
||||
@child_block old patch.end
|
||||
first last=end |
|
||||
|#################|...... cut extra data.
|
||||
^
|
||||
continuation : None (extract complete)
|
||||
|
||||
@fetched patch
|
||||
|-------- patch --------|
|
||||
"""
|
||||
parent = Block(first=0, last=4000, end=33500, continuation='parent', during_split=True)
|
||||
child = Block(first=0, last=0, end=33500, continuation='mean', during_split=True)
|
||||
patch = Patch(chats=load_chatdata('pt0-5.json'),
|
||||
first=32500, last=34000, continuation='patch')
|
||||
|
||||
split(parent,child,patch)
|
||||
|
||||
assert child.continuation is None
|
||||
assert parent.last < child.first
|
||||
assert parent.end == child.first
|
||||
assert child.first < child.last
|
||||
assert child.last < child.end
|
||||
assert child.continuation is None
|
||||
assert parent.during_split == False
|
||||
assert child.during_split == False
|
||||
|
||||
def test_split_none():
|
||||
"""patch.last <= parent_block.last
|
||||
|
||||
While awaiting in run() -> asyncdl._fetch(),
fetching of parent_block proceeds
and parent_block.last comes to exceed patch.last.

In this case the fetched patch is discarded entirely,
and the worker searches for another block to process.
|
||||
|
||||
~~~~~~ before ~~~~~~
|
||||
|
||||
patch.first
|
||||
first | last end
|
||||
|####################|###################|-------|
|
||||
^
|
||||
@child_block
|
||||
first = last = 0 end (=parent_end)
|
||||
| |
|
||||
|
||||
@fetched patch
|
||||
|-- patch --|
|
||||
patch.last < parent_block.last .
|
||||
|
||||
|
|
||||
|
|
||||
V
|
||||
|
||||
~~~~~~ after ~~~~~~
|
||||
|
||||
@parent_block
|
||||
first last end (before split)
|
||||
|########################################|-------|
|
||||
|
||||
@child_block
|
||||
|
||||
............ -> discard all data.
|
||||
|
||||
"""
|
||||
parent = Block(first=0, last=40000, end=60000, continuation='parent', during_split=True)
|
||||
child = Block(first=0, last=0, end=60000, continuation='mean', during_split=True)
|
||||
patch = Patch(chats=load_chatdata('pt0-5.json'),
|
||||
first=32500, last=34000, continuation='patch')
|
||||
|
||||
split(parent,child,patch)
|
||||
|
||||
assert parent.last == 40000 #no change
|
||||
assert parent.end == 60000 #no change
|
||||
assert child.continuation is None
|
||||
assert parent.during_split == False
|
||||
assert child.during_split == True  # stays flagged during_split, so this block is excluded from further processing
|
||||
55
tests/test_extract_video_id.py
Normal file
@@ -0,0 +1,55 @@
|
||||
from pytchat.util import extract_video_id
|
||||
from pytchat.exceptions import InvalidVideoIdException
|
||||
|
||||
VALID_TEST_PATTERNS = (
|
||||
("ABC_EFG_IJK", "ABC_EFG_IJK"),
|
||||
("vid_test_be", "vid_test_be"),
|
||||
("https://www.youtube.com/watch?v=123_456_789", "123_456_789"),
|
||||
("https://www.youtube.com/watch?v=123_456_789&t=123s", "123_456_789"),
|
||||
("www.youtube.com/watch?v=123_456_789", "123_456_789"),
|
||||
("watch?v=123_456_789", "123_456_789"),
|
||||
("youtube.com/watch?v=123_456_789", "123_456_789"),
|
||||
("http://youtu.be/ABC_EFG_IJK", "ABC_EFG_IJK"),
|
||||
("youtu.be/ABC_EFG_IJK", "ABC_EFG_IJK"),
|
||||
("https://www.youtube.com/watch?v=ABC_EFG_IJK&list=XYZ_ABC_12345&start_radio=1&t=1", "ABC_EFG_IJK"),
|
||||
("https://www.youtube.com/embed/ABC_EFG_IJK", "ABC_EFG_IJK"),
|
||||
("www.youtube.com/embed/ABC_EFG_IJK", "ABC_EFG_IJK"),
|
||||
("youtube.com/embed/ABC_EFG_IJK", "ABC_EFG_IJK")
|
||||
)
|
||||
|
||||
INVALID_TEST_PATTERNS = (
|
||||
("", ""),
|
||||
("0123456789", "0123456789"), # less than 11 letters id
|
||||
("more_than_11_letter_string", "more_than_11_letter_string"),
|
||||
("https://www.youtube.com/watch?v=more_than_11_letter_string", "more_than_11_letter_string"),
|
||||
("https://www.youtube.com/channel/123_456_789", "123_456_789"),
|
||||
)
|
||||
|
||||
TYPEERROR_TEST_PATTERNS = (
|
||||
(100, 100), # not string
|
||||
(["123_456_789"], "123_456_789"), # not string
|
||||
)
|
||||
|
||||
|
||||
def test_extract_valid_pattern():
|
||||
for pattern in VALID_TEST_PATTERNS:
|
||||
ret = extract_video_id(pattern[0])
|
||||
assert ret == pattern[1]
|
||||
|
||||
|
||||
def test_extract_invalid_pattern():
|
||||
for pattern in INVALID_TEST_PATTERNS:
|
||||
try:
|
||||
extract_video_id(pattern[0])
|
||||
assert False
|
||||
except InvalidVideoIdException:
|
||||
assert True
|
||||
|
||||
|
||||
def test_extract_typeerror_pattern():
|
||||
for pattern in TYPEERROR_TEST_PATTERNS:
|
||||
try:
|
||||
extract_video_id(pattern[0])
|
||||
assert False
|
||||
except TypeError:
|
||||
assert True
|
||||
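The pattern tables above effectively specify the extractor: a bare 11-character ID is returned unchanged, watch?v=, youtu.be/ and embed/ URLs yield the 11 characters that follow, and anything else raises InvalidVideoIdException (non-strings raise TypeError). A hypothetical sketch consistent with these tables (not pytchat.util's actual implementation):

import re

_ID = r'[0-9A-Za-z_-]{11}'
_BARE = re.compile(rf'^{_ID}$')
_IN_URL = (
    re.compile(rf'watch\?v=({_ID})(?![0-9A-Za-z_-])'),
    re.compile(rf'youtu\.be/({_ID})(?![0-9A-Za-z_-])'),
    re.compile(rf'embed/({_ID})(?![0-9A-Za-z_-])'),
)

def extract_video_id_sketch(value):
    if not isinstance(value, str):
        raise TypeError("video_id must be a str")
    if _BARE.match(value):
        return value                        # already a bare 11-character id
    for pattern in _IN_URL:
        found = pattern.search(value)
        if found:
            return found.group(1)
    raise InvalidVideoIdException(value)    # imported at the top of this test file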
@@ -1,53 +0,0 @@
|
||||
import pytest
|
||||
from pytchat.parser.live import Parser
|
||||
|
||||
import json
|
||||
import asyncio,aiohttp
|
||||
|
||||
from aioresponses import aioresponses
|
||||
from pytchat.core_async.livechat import LiveChatAsync
|
||||
from pytchat.exceptions import (
|
||||
NoLivechatRendererException,NoYtinitialdataException,
|
||||
ResponseContextError,NoContentsException)
|
||||
|
||||
|
||||
from pytchat.core_multithread.livechat import LiveChat
|
||||
import unittest
|
||||
from unittest import TestCase
|
||||
|
||||
def _open_file(path):
|
||||
with open(path,mode ='r',encoding = 'utf-8') as f:
|
||||
return f.read()
|
||||
|
||||
@aioresponses()
|
||||
def test_Async(*mock):
|
||||
vid=''
|
||||
_text = _open_file('tests/testdata/paramgen_firstread.json')
|
||||
_text = json.loads(_text)
|
||||
mock[0].get(f"https://www.youtube.com/live_chat?v={vid}&is_popout=1", status=200, body=_text)
|
||||
try:
|
||||
chat = LiveChatAsync(video_id='')
|
||||
assert chat.is_alive()
|
||||
chat.terminate()
|
||||
assert not chat.is_alive()
|
||||
except ResponseContextError:
|
||||
assert not chat.is_alive()
|
||||
|
||||
def test_MultiThread(mocker):
|
||||
_text = _open_file('tests/testdata/paramgen_firstread.json')
|
||||
_text = json.loads(_text)
|
||||
responseMock = mocker.Mock()
|
||||
responseMock.status_code = 200
|
||||
responseMock.text = _text
|
||||
mocker.patch('requests.Session.get').return_value = responseMock
|
||||
try:
|
||||
chat = LiveChatAsync(video_id='')
|
||||
assert chat.is_alive()
|
||||
chat.terminate()
|
||||
assert not chat.is_alive()
|
||||
except ResponseContextError:
|
||||
chat.terminate()
|
||||
assert not chat.is_alive()
|
||||
|
||||
|
||||
|
||||
@@ -1,125 +0,0 @@
|
||||
import asyncio, aiohttp
|
||||
import json
|
||||
import pytest
|
||||
import re
|
||||
import requests
|
||||
import sys
|
||||
import time
|
||||
from aioresponses import aioresponses
|
||||
from concurrent.futures import CancelledError
|
||||
from unittest import TestCase
|
||||
from pytchat.core_multithread.livechat import LiveChat
|
||||
from pytchat.core_async.livechat import LiveChatAsync
|
||||
from pytchat.exceptions import (
|
||||
NoLivechatRendererException,NoYtinitialdataException,
|
||||
ResponseContextError,NoContentsException)
|
||||
from pytchat.parser.live import Parser
|
||||
from pytchat.processors.dummy_processor import DummyProcessor
|
||||
|
||||
def _open_file(path):
|
||||
with open(path,mode ='r',encoding = 'utf-8') as f:
|
||||
return f.read()
|
||||
|
||||
@aioresponses()
|
||||
def test_async_live_stream(*mock):
|
||||
|
||||
async def test_loop(*mock):
|
||||
pattern = re.compile(r'^https://www.youtube.com/live_chat/get_live_chat\?continuation=.*$')
|
||||
_text = _open_file('tests/testdata/test_stream.json')
|
||||
mock[0].get(pattern, status=200, body=_text)
|
||||
chat = LiveChatAsync(video_id='', processor = DummyProcessor())
|
||||
chats = await chat.get()
|
||||
rawdata = chats[0]["chatdata"]
|
||||
# assert fetching livechat data
|
||||
assert list(rawdata[0]["addChatItemAction"]["item"].keys())[0] == "liveChatTextMessageRenderer"
|
||||
assert list(rawdata[1]["addChatItemAction"]["item"].keys())[0] == "liveChatTextMessageRenderer"
|
||||
assert list(rawdata[2]["addChatItemAction"]["item"].keys())[0] == "liveChatPlaceholderItemRenderer"
|
||||
assert list(rawdata[3]["addLiveChatTickerItemAction"]["item"].keys())[0] == "liveChatTickerPaidMessageItemRenderer"
|
||||
assert list(rawdata[4]["addChatItemAction"]["item"].keys())[0] == "liveChatPaidMessageRenderer"
|
||||
assert list(rawdata[5]["addChatItemAction"]["item"].keys())[0] == "liveChatPaidStickerRenderer"
|
||||
assert list(rawdata[6]["addLiveChatTickerItemAction"]["item"].keys())[0] == "liveChatTickerSponsorItemRenderer"
|
||||
|
||||
loop = asyncio.get_event_loop()
|
||||
try:
|
||||
loop.run_until_complete(test_loop(*mock))
|
||||
except CancelledError:
|
||||
assert True
|
||||
|
||||
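The indices asserted above reflect the order of actions in tests/testdata/test_stream.json; each action carries exactly one renderer under its "item" key. A small sketch of walking that raw structure (key names taken from the assertions, nothing else assumed):

def renderer_names(rawdata):
    names = []
    for action in rawdata:
        add = action.get("addChatItemAction") or action.get("addLiveChatTickerItemAction")
        if add:
            # each action holds a single renderer, keyed by its type name
            names.append(list(add["item"].keys())[0])
    return names   # starts with "liveChatTextMessageRenderer" for the fixture above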
@aioresponses()
|
||||
def test_async_replay_stream(*mock):
|
||||
|
||||
async def test_loop(*mock):
|
||||
pattern_live = re.compile(r'^https://www.youtube.com/live_chat/get_live_chat\?continuation=.*$')
|
||||
pattern_replay = re.compile(r'^https://www.youtube.com/live_chat_replay/get_live_chat_replay\?continuation=.*$')
|
||||
#empty livechat -> switch to fetch replaychat
|
||||
_text_live = _open_file('tests/testdata/finished_live.json')
|
||||
_text_replay = _open_file('tests/testdata/chatreplay.json')
|
||||
mock[0].get(pattern_live, status=200, body=_text_live)
|
||||
mock[0].get(pattern_replay, status=200, body=_text_replay)
|
||||
|
||||
chat = LiveChatAsync(video_id='', processor = DummyProcessor())
|
||||
chats = await chat.get()
|
||||
rawdata = chats[0]["chatdata"]
|
||||
#assert fetching replaychat data
|
||||
assert list(rawdata[0]["addChatItemAction"]["item"].keys())[0] == "liveChatTextMessageRenderer"
|
||||
assert list(rawdata[14]["addChatItemAction"]["item"].keys())[0] == "liveChatPaidMessageRenderer"
|
||||
|
||||
loop = asyncio.get_event_loop()
|
||||
try:
|
||||
loop.run_until_complete(test_loop(*mock))
|
||||
except CancelledError:
|
||||
assert True
|
||||
|
||||
@aioresponses()
|
||||
def test_async_force_replay(*mock):
|
||||
|
||||
async def test_loop(*mock):
|
||||
pattern_live = re.compile(r'^https://www.youtube.com/live_chat/get_live_chat\?continuation=.*$')
|
||||
pattern_replay = re.compile(r'^https://www.youtube.com/live_chat_replay/get_live_chat_replay\?continuation=.*$')
|
||||
#valid live data, but force_replay = True
|
||||
_text_live = _open_file('tests/testdata/test_stream.json')
|
||||
#valid replay data
|
||||
_text_replay = _open_file('tests/testdata/chatreplay.json')
|
||||
|
||||
mock[0].get(pattern_live, status=200, body=_text_live)
|
||||
mock[0].get(pattern_replay, status=200, body=_text_replay)
|
||||
#force replay
|
||||
chat = LiveChatAsync(video_id='', processor = DummyProcessor(), force_replay = True)
|
||||
chats = await chat.get()
|
||||
rawdata = chats[0]["chatdata"]
|
||||
# assert fetching replaychat data
|
||||
assert list(rawdata[14]["addChatItemAction"]["item"].keys())[0] == "liveChatPaidMessageRenderer"
|
||||
# assert not mix livechat data
|
||||
assert list(rawdata[2]["addChatItemAction"]["item"].keys())[0] != "liveChatPlaceholderItemRenderer"
|
||||
|
||||
loop = asyncio.get_event_loop()
|
||||
try:
|
||||
loop.run_until_complete(test_loop(*mock))
|
||||
except CancelledError:
|
||||
assert True
|
||||
|
||||
def test_multithread_live_stream(mocker):
|
||||
|
||||
_text = _open_file('tests/testdata/test_stream.json')
|
||||
responseMock = mocker.Mock()
|
||||
responseMock.status_code = 200
|
||||
responseMock.text = _text
|
||||
mocker.patch('requests.Session.get').return_value.__enter__.return_value = responseMock
|
||||
|
||||
chat = LiveChat(video_id='test_id', processor = DummyProcessor())
|
||||
chats = chat.get()
|
||||
rawdata = chats[0]["chatdata"]
|
||||
# assert fetching livechat data
|
||||
assert list(rawdata[0]["addChatItemAction"]["item"].keys())[0] == "liveChatTextMessageRenderer"
|
||||
assert list(rawdata[1]["addChatItemAction"]["item"].keys())[0] == "liveChatTextMessageRenderer"
|
||||
assert list(rawdata[2]["addChatItemAction"]["item"].keys())[0] == "liveChatPlaceholderItemRenderer"
|
||||
assert list(rawdata[3]["addLiveChatTickerItemAction"]["item"].keys())[0] == "liveChatTickerPaidMessageItemRenderer"
|
||||
assert list(rawdata[4]["addChatItemAction"]["item"].keys())[0] == "liveChatPaidMessageRenderer"
|
||||
assert list(rawdata[5]["addChatItemAction"]["item"].keys())[0] == "liveChatPaidStickerRenderer"
|
||||
assert list(rawdata[6]["addLiveChatTickerItemAction"]["item"].keys())[0] == "liveChatTickerSponsorItemRenderer"
|
||||
chat.terminate()
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
@@ -1,9 +0,0 @@
|
||||
import pytest
|
||||
from pytchat.paramgen import liveparam
|
||||
|
||||
def test_liveparam_0(mocker):
|
||||
_ts1= 1546268400
|
||||
param = liveparam._build("01234567890",
|
||||
*([_ts1*1000000 for i in range(5)]), topchat_only=False)
|
||||
test_param="0ofMyAPiARp8Q2c4S0RRb0xNREV5TXpRMU5qYzRPVEFhUTZxNXdiMEJQUW83YUhSMGNITTZMeTkzZDNjdWVXOTFkSFZpWlM1amIyMHZiR2wyWlY5amFHRjBQM1k5TURFeU16UTFOamM0T1RBbWFYTmZjRzl3YjNWMFBURWdBZyUzRCUzRCiAuNbVqsrfAjAAOABAAkorCAEQABgAIAAqDnN0YXRpY2NoZWNrc3VtOgBAAEoCCAFQgLjW1arK3wJYA1CAuNbVqsrfAliAuNbVqsrfAmgBggEECAEQAIgBAKABgLjW1arK3wI%3D"
|
||||
assert test_param == param
|
||||
@@ -1,44 +1,34 @@
|
||||
import pytest
|
||||
from pytchat.parser.live import Parser
|
||||
import json
|
||||
import asyncio,aiohttp
|
||||
from aioresponses import aioresponses
|
||||
from pytchat.exceptions import (
|
||||
NoLivechatRendererException,NoYtinitialdataException,
|
||||
ResponseContextError, NoContentsException)
|
||||
from pytchat.exceptions import NoContents
|
||||
|
||||
|
||||
parser = Parser(is_replay=False)
|
||||
|
||||
|
||||
def _open_file(path):
|
||||
with open(path,mode ='r',encoding = 'utf-8') as f:
|
||||
with open(path, mode='r', encoding='utf-8') as f:
|
||||
return f.read()
|
||||
parser = Parser(is_replay = False)
|
||||
|
||||
@aioresponses()
|
||||
|
||||
def test_finishedlive(*mock):
|
||||
'''Whether a video whose live stream has finished is handled correctly'''
|
||||
|
||||
_text = _open_file('tests/testdata/finished_live.json')
|
||||
_text = json.loads(_text)
|
||||
|
||||
try:
|
||||
parser.parse(parser.get_contents(_text))
|
||||
parser.parse(parser.get_contents(_text)[0])
|
||||
assert False
|
||||
except NoContentsException:
|
||||
except NoContents:
|
||||
assert True
|
||||
|
||||
@aioresponses()
|
||||
def test_parsejson(*mock):
|
||||
'''Whether the JSON is parsed correctly'''
|
||||
|
||||
def test_parsejson(*mock):
|
||||
_text = _open_file('tests/testdata/paramgen_firstread.json')
|
||||
_text = json.loads(_text)
|
||||
|
||||
try:
|
||||
parser.parse(parser.get_contents(_text))
|
||||
jsn = _text
|
||||
timeout = jsn["response"]["continuationContents"]["liveChatContinuation"]["continuations"][0]["timedContinuationData"]["timeoutMs"]
|
||||
continuation = jsn["response"]["continuationContents"]["liveChatContinuation"]["continuations"][0]["timedContinuationData"]["continuation"]
|
||||
assert 5035 == timeout
|
||||
assert "0ofMyAPiARp8Q2c4S0RRb0xhelJMZDBsWFQwdERkalFhUTZxNXdiMEJQUW83YUhSMGNITTZMeTkzZDNjdWVXOTFkSFZpWlM1amIyMHZiR2wyWlY5amFHRjBQM1k5YXpSTGQwbFhUMHREZGpRbWFYTmZjRzl3YjNWMFBURWdBZyUzRCUzRCiPz5-Os-PkAjAAOABAAUorCAAQABgAIAAqDnN0YXRpY2NoZWNrc3VtOgBAAEoCCAFQgJqXjrPj5AJYA1CRwciOs-PkAli3pNq1k-PkAmgBggEECAEQAIgBAKABjbfnjrPj5AI%3D" == continuation
|
||||
except:
|
||||
s, _ = parser.parse(parser.get_contents(_text)[0])
|
||||
assert s['timeoutMs'] == 5035
|
||||
assert s['continuation'] == "0ofMyAPiARp8Q2c4S0RRb0xhelJMZDBsWFQwdERkalFhUTZxNXdiMEJQUW83YUhSMGNITTZMeTkzZDNjdWVXOTFkSFZpWlM1amIyMHZiR2wyWlY5amFHRjBQM1k5YXpSTGQwbFhUMHREZGpRbWFYTmZjRzl3YjNWMFBURWdBZyUzRCUzRCiPz5-Os-PkAjAAOABAAUorCAAQABgAIAAqDnN0YXRpY2NoZWNrc3VtOgBAAEoCCAFQgJqXjrPj5AJYA1CRwciOs-PkAli3pNq1k-PkAmgBggEECAEQAIgBAKABjbfnjrPj5AI%3D"
|
||||
except Exception:
|
||||
assert False
|
||||
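The rewritten test reflects the parser API used throughout this change set: get_contents() now returns a sequence whose first element is handed to parse(), and parse() returns a pair of (continuation/timeout info, chat data). A minimal usage sketch of that shape (only keys asserted above are used):

contents = parser.get_contents(json.loads(_text))[0]
info, chatdata = parser.parse(contents)
print(info['timeoutMs'])            # 5035 for this fixture
print(info['continuation'][:24])    # start of the next continuation token
print(len(chatdata))                # number of raw chat actions (fixture-dependent)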
@@ -1,15 +1,9 @@
|
||||
import json
|
||||
import pytest
|
||||
import asyncio,aiohttp
|
||||
from pytchat.parser.live import Parser
|
||||
from pytchat.processors.compatible.processor import CompatibleProcessor
|
||||
from pytchat.exceptions import (
|
||||
NoLivechatRendererException,NoYtinitialdataException,
|
||||
ResponseContextError, NoContentsException)
|
||||
|
||||
from pytchat.processors.speed.calculator import SpeedCalculator
|
||||
|
||||
parser = Parser(is_replay =False)
|
||||
parser = Parser(is_replay=False)
|
||||
|
||||
|
||||
def test_speed_1(mocker):
|
||||
'''test speed calculation with normal json.
|
||||
@@ -21,15 +15,16 @@ def test_speed_1(mocker):
|
||||
|
||||
_json = _open_file("tests/testdata/speed/speedtest_normal.json")
|
||||
|
||||
_, chatdata = parser.parse(parser.get_contents(json.loads(_json)))
|
||||
_, chatdata = parser.parse(parser.get_contents(json.loads(_json))[0])
|
||||
data = {
|
||||
"video_id" : "",
|
||||
"timeout" : 10,
|
||||
"chatdata" : chatdata
|
||||
"video_id": "",
|
||||
"timeout": 10,
|
||||
"chatdata": chatdata
|
||||
}
|
||||
ret = processor.process([data])
|
||||
assert 30 == ret
|
||||
|
||||
|
||||
def test_speed_2(mocker):
|
||||
'''test speed calculation with no valid chat data.
|
||||
'''
|
||||
@@ -37,14 +32,15 @@ def test_speed_2(mocker):
|
||||
|
||||
_json = _open_file("tests/testdata/speed/speedtest_undefined.json")
|
||||
|
||||
_, chatdata = parser.parse(parser.get_contents(json.loads(_json)))
|
||||
_, chatdata = parser.parse(parser.get_contents(json.loads(_json))[0])
|
||||
data = {
|
||||
"video_id" : "",
|
||||
"timeout" : 10,
|
||||
"chatdata" : chatdata
|
||||
"video_id": "",
|
||||
"timeout": 10,
|
||||
"chatdata": chatdata
|
||||
}
|
||||
ret = processor.process([data])
|
||||
assert 0 == ret
|
||||
assert ret == 0
|
||||
|
||||
|
||||
def test_speed_3(mocker):
|
||||
'''test speed calculation with empty data.
|
||||
@@ -53,16 +49,16 @@ def test_speed_3(mocker):
|
||||
|
||||
_json = _open_file("tests/testdata/speed/speedtest_empty.json")
|
||||
|
||||
_, chatdata = parser.parse(parser.get_contents(json.loads(_json)))
|
||||
_, chatdata = parser.parse(parser.get_contents(json.loads(_json))[0])
|
||||
data = {
|
||||
"video_id" : "",
|
||||
"timeout" : 10,
|
||||
"chatdata" : chatdata
|
||||
"video_id": "",
|
||||
"timeout": 10,
|
||||
"chatdata": chatdata
|
||||
}
|
||||
ret = processor.process([data])
|
||||
assert 0 == ret
|
||||
assert ret == 0
|
||||
|
||||
|
||||
def _open_file(path):
|
||||
with open(path,mode ='r',encoding = 'utf-8') as f:
|
||||
with open(path, mode='r', encoding='utf-8') as f:
|
||||
return f.read()
|
||||
|
||||
@@ -1,62 +0,0 @@
|
||||
from pytchat.tool.videoinfo import VideoInfo
|
||||
from pytchat.exceptions import InvalidVideoIdException
|
||||
import pytest
|
||||
|
||||
def _open_file(path):
|
||||
with open(path,mode ='r',encoding = 'utf-8') as f:
|
||||
return f.read()
|
||||
|
||||
def _set_test_data(filepath, mocker):
|
||||
_text = _open_file(filepath)
|
||||
response_mock = mocker.Mock()
|
||||
response_mock.status_code = 200
|
||||
response_mock.text = _text
|
||||
mocker.patch('requests.get').return_value = response_mock
|
||||
|
||||
def test_archived_page(mocker):
|
||||
_set_test_data('tests/testdata/videoinfo/archived_page.txt', mocker)
|
||||
info = VideoInfo('test_id')
|
||||
actual_thumbnail_url = 'https://i.ytimg.com/vi/fzI9FNjXQ0o/hqdefault.jpg'
|
||||
assert info.video_id == 'test_id'
|
||||
assert info.get_channel_name() == 'GitHub'
|
||||
assert info.get_thumbnail() == actual_thumbnail_url
|
||||
assert info.get_title() == 'GitHub Arctic Code Vault'
|
||||
assert info.get_channel_id() == 'UC7c3Kb6jYCRj4JOHHZTxKsQ'
|
||||
assert info.get_duration() == 148
|
||||
|
||||
def test_live_page(mocker):
|
||||
_set_test_data('tests/testdata/videoinfo/live_page.txt', mocker)
|
||||
info = VideoInfo('test_id')
|
||||
'''live page :duration = 0'''
|
||||
assert info.get_duration() == 0
|
||||
assert info.video_id == 'test_id'
|
||||
assert info.get_channel_name() == 'BGM channel'
|
||||
assert info.get_thumbnail() == \
|
||||
'https://i.ytimg.com/vi/fEvM-OUbaKs/hqdefault_live.jpg'
|
||||
assert info.get_title() == (
|
||||
'Coffee Jazz Music - Chill Out Lounge Jazz Music Radio'
|
||||
' - 24/7 Live Stream - Slow Jazz')
|
||||
assert info.get_channel_id() == 'UCQINXHZqCU5i06HzxRkujfg'
|
||||
|
||||
def test_invalid_video_id(mocker):
|
||||
'''Test case where an invalid video_id is specified.'''
|
||||
_set_test_data(
|
||||
'tests/testdata/videoinfo/invalid_video_id_page.txt', mocker)
|
||||
try:
|
||||
_ = VideoInfo('test_id')
|
||||
assert False
|
||||
except InvalidVideoIdException:
|
||||
assert True
|
||||
|
||||
def test_no_info(mocker):
|
||||
'''Test case where the video page has a renderer but no info.'''
|
||||
_set_test_data(
|
||||
'tests/testdata/videoinfo/no_info_page.txt', mocker)
|
||||
info = VideoInfo('test_id')
|
||||
assert info.video_id == 'test_id'
|
||||
assert info.get_channel_name() is None
|
||||
assert info.get_thumbnail() is None
|
||||
assert info.get_title() is None
|
||||
assert info.get_channel_id() is None
|
||||
assert info.get_duration() is None
|
||||
|
||||
2
tests/testdata/calculator/superchat_0.json
vendored
@@ -1,5 +1,4 @@
|
||||
{
|
||||
"response": {
|
||||
"responseContext": {
|
||||
"webResponseContextExtensionData": ""
|
||||
},
|
||||
@@ -3320,5 +3319,4 @@
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
2
tests/testdata/calculator/text_only.json
vendored
@@ -1,5 +1,4 @@
|
||||
{
|
||||
"response": {
|
||||
"responseContext": {
|
||||
"webResponseContextExtensionData": ""
|
||||
},
|
||||
@@ -85,5 +84,4 @@
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
4
tests/testdata/chatreplay.json
vendored
@@ -15,7 +15,6 @@
|
||||
"url": "/live_chat_replay/get_live_chat_replay?continuation=op2w0wRyGjxDZzhhRFFvTFUzTnFRMjVJVDJzdFUyc2FFLXFvM2JrQkRRb0xVM05xUTI1SVQyc3RVMnNnQVElM0QlM0QoATAAOABAAEgEUhwIABAAGAAgACoOc3RhdGljY2hlY2tzdW1AAFgDYAFoAHIECAEQAHgA"
|
||||
}
|
||||
},
|
||||
"response": {
|
||||
"responseContext": {
|
||||
"serviceTrackingParams": [
|
||||
{
|
||||
@@ -3096,9 +3095,8 @@
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
"csn": "CAXIXa-uHZnC4wL5nZ7oBQ",
|
||||
"url": "\/live_chat_replay\/get_live_chat_replay?continuation=op2w0wRyGjxDZzhhRFFvTFUzTnFRMjVJVDJzdFUyc2FFLXFvM2JrQkRRb0xVM05xUTI1SVQyc3RVMnNnQVElM0QlM0QoATAAOABAAEgEUhwIABAAGAAgACoOc3RhdGljY2hlY2tzdW1AAFgDYAFoAHIECAEQAHgA",
|
||||
"url": "/live_chat_replay/get_live_chat_replay?continuation=op2w0wRyGjxDZzhhRFFvTFUzTnFRMjVJVDJzdFUyc2FFLXFvM2JrQkRRb0xVM05xUTI1SVQyc3RVMnNnQVElM0QlM0QoATAAOABAAEgEUhwIABAAGAAgACoOc3RhdGljY2hlY2tzdW1AAFgDYAFoAHIECAEQAHgA",
|
||||
"xsrf_token": "QUFFLUhqbTZWWEFiT3ZxaDAtY09pRzZXSUotZC1uclFMQXxBQ3Jtc0tsOFZYN09CWFlBd2NKSFB4R3hmN3dUY2xXaW9tbzdFZlZBTllDcnBhMG9WUXVkZGZ5RGRIYkxSajBiNVpsNU5PV3hNYkhUZGJybTVEYWM2MWREbTRUYnc3XzRpeUJVbFpNR0dod1RPbGtVLWJhdkhtUVpVN0JKVGNSQVRSY0ZsODhodEwxaWdjN0pHZThlbEJVXzJMc2VXZGtQOXc="
|
||||
}
|
||||
7
tests/testdata/compatible/newSponsor.json
vendored
@@ -4,7 +4,7 @@
|
||||
"st": 100
|
||||
}
|
||||
},
|
||||
"response": {
|
||||
|
||||
"responseContext": {
|
||||
"serviceTrackingParams": [
|
||||
{
|
||||
@@ -1838,9 +1838,8 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"trackingParams": "CAAQ0b4BIhMI1LbVw_aa5QIV2cxMAh2AtAj8"
|
||||
},
|
||||
"url": "\/live_chat\/get_live_chat?continuation=0ofMyAOBAhrKAUNrd1NJUW9ZVlVOdVVsRlpTRlJ1VWt4VFJqQmpURXAzVFc1bFpFTm5FZ1V2YkdsMlpTb25DaGhWUTI1U1VWbElWRzVTVEZOR01HTk1TbmROYm1Wa1EyY1NDMDFETTNkVlNpMUNXRTVGR2tPcXVjRzlBVDBLTzJoMGRIQnpPaTh2ZDNkM0xubHZkWFIxWW1VdVkyOXRMMnhwZG1WZlkyaGhkRDkyUFUxRE0zZFZTaTFDV0U1RkptbHpYM0J2Y0c5MWREMHhJQUklM0QwAUopCAAQABgAIAAqDnN0YXRpY2NoZWNrc3VtOgBAAEoAUPOaw8P2muUCWANoAYIBAggB",
|
||||
"trackingParams": "CAAQ0b4BIhMI1LbVw_aa5QIV2cxMAh2AtAj8",
|
||||
"url": "/live_chat/get_live_chat?continuation=0ofMyAOBAhrKAUNrd1NJUW9ZVlVOdVVsRlpTRlJ1VWt4VFJqQmpURXAzVFc1bFpFTm5FZ1V2YkdsMlpTb25DaGhWUTI1U1VWbElWRzVTVEZOR01HTk1TbmROYm1Wa1EyY1NDMDFETTNkVlNpMUNXRTVGR2tPcXVjRzlBVDBLTzJoMGRIQnpPaTh2ZDNkM0xubHZkWFIxWW1VdVkyOXRMMnhwZG1WZlkyaGhkRDkyUFUxRE0zZFZTaTFDV0U1RkptbHpYM0J2Y0c5MWREMHhJQUklM0QwAUopCAAQABgAIAAqDnN0YXRpY2NoZWNrc3VtOgBAAEoAUPOaw8P2muUCWANoAYIBAggB",
|
||||
"csn": "PvujXbH0OIazqQHXgJ64DQ",
|
||||
"endpoint": {
|
||||
"commandMetadata": {
|
||||
|
||||
1823
tests/testdata/compatible/newSponsor_rev.json
vendored
Normal file
File diff suppressed because it is too large
5
tests/testdata/compatible/superchat.json
vendored
@@ -15,9 +15,9 @@
|
||||
"url": "/live_chat/get_live_chat?continuation=0ofMyAPiARp8Q2c4S0RRb0xPREJQZW5SS2FIVTNOemdhUTZxNXdiMEJQUW83YUhSMGNITTZMeTkzZDNjdWVXOTFkSFZpWlM1amIyMHZiR2wyWlY5amFHRjBQM1k5T0RCUGVuUkthSFUzTnpnbWFYTmZjRzl3YjNWMFBURWdBZyUzRCUzRCja2bHMpPvkAjAAOABAAUorCAAQABgAIAAqDnN0YXRpY2NoZWNrc3VtOgBAAEoCCAFQwJKPoqT75AJYA1DXxuTMpPvkAliL_fvZoPvkAmgBggEECAEQAIgBAKABxM3xzKT75AI%253D"
|
||||
}
|
||||
},
|
||||
"url": "\/live_chat\/get_live_chat?continuation=0ofMyAPiARp8Q2c4S0RRb0xPREJQZW5SS2FIVTNOemdhUTZxNXdiMEJQUW83YUhSMGNITTZMeTkzZDNjdWVXOTFkSFZpWlM1amIyMHZiR2wyWlY5amFHRjBQM1k5T0RCUGVuUkthSFUzTnpnbWFYTmZjRzl3YjNWMFBURWdBZyUzRCUzRCja2bHMpPvkAjAAOABAAUorCAAQABgAIAAqDnN0YXRpY2NoZWNrc3VtOgBAAEoCCAFQwJKPoqT75AJYA1DXxuTMpPvkAliL_fvZoPvkAmgBggEECAEQAIgBAKABxM3xzKT75AI%253D",
|
||||
"url": "/live_chat/get_live_chat?continuation=0ofMyAPiARp8Q2c4S0RRb0xPREJQZW5SS2FIVTNOemdhUTZxNXdiMEJQUW83YUhSMGNITTZMeTkzZDNjdWVXOTFkSFZpWlM1amIyMHZiR2wyWlY5amFHRjBQM1k5T0RCUGVuUkthSFUzTnpnbWFYTmZjRzl3YjNWMFBURWdBZyUzRCUzRCja2bHMpPvkAjAAOABAAUorCAAQABgAIAAqDnN0YXRpY2NoZWNrc3VtOgBAAEoCCAFQwJKPoqT75AJYA1DXxuTMpPvkAliL_fvZoPvkAmgBggEECAEQAIgBAKABxM3xzKT75AI%253D",
|
||||
"csn": "n2STXd2iKZr2gAOt9qvgCg",
|
||||
"response": {
|
||||
|
||||
"responseContext": {
|
||||
"serviceTrackingParams": [
|
||||
{
|
||||
@@ -278,5 +278,4 @@
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
4
tests/testdata/compatible/supersticker.json
vendored
@@ -15,9 +15,8 @@
|
||||
"url": "/live_chat/get_live_chat?continuation=0ofMyAPiARp8Q2c4S0RRb0xPREJQZW5SS2FIVTNOemdhUTZxNXdiMEJQUW83YUhSMGNITTZMeTkzZDNjdWVXOTFkSFZpWlM1amIyMHZiR2wyWlY5amFHRjBQM1k5T0RCUGVuUkthSFUzTnpnbWFYTmZjRzl3YjNWMFBURWdBZyUzRCUzRCja2bHMpPvkAjAAOABAAUorCAAQABgAIAAqDnN0YXRpY2NoZWNrc3VtOgBAAEoCCAFQwJKPoqT75AJYA1DXxuTMpPvkAliL_fvZoPvkAmgBggEECAEQAIgBAKABxM3xzKT75AI%253D"
|
||||
}
|
||||
},
|
||||
"url": "\/live_chat\/get_live_chat?continuation=0ofMyAPiARp8Q2c4S0RRb0xPREJQZW5SS2FIVTNOemdhUTZxNXdiMEJQUW83YUhSMGNITTZMeTkzZDNjdWVXOTFkSFZpWlM1amIyMHZiR2wyWlY5amFHRjBQM1k5T0RCUGVuUkthSFUzTnpnbWFYTmZjRzl3YjNWMFBURWdBZyUzRCUzRCja2bHMpPvkAjAAOABAAUorCAAQABgAIAAqDnN0YXRpY2NoZWNrc3VtOgBAAEoCCAFQwJKPoqT75AJYA1DXxuTMpPvkAliL_fvZoPvkAmgBggEECAEQAIgBAKABxM3xzKT75AI%253D",
|
||||
"url": "/live_chat/get_live_chat?continuation=0ofMyAPiARp8Q2c4S0RRb0xPREJQZW5SS2FIVTNOemdhUTZxNXdiMEJQUW83YUhSMGNITTZMeTkzZDNjdWVXOTFkSFZpWlM1amIyMHZiR2wyWlY5amFHRjBQM1k5T0RCUGVuUkthSFUzTnpnbWFYTmZjRzl3YjNWMFBURWdBZyUzRCUzRCja2bHMpPvkAjAAOABAAUorCAAQABgAIAAqDnN0YXRpY2NoZWNrc3VtOgBAAEoCCAFQwJKPoqT75AJYA1DXxuTMpPvkAliL_fvZoPvkAmgBggEECAEQAIgBAKABxM3xzKT75AI%253D",
|
||||
"csn": "n2STXd2iKZr2gAOt9qvgCg",
|
||||
"response": {
|
||||
"responseContext": {
|
||||
"serviceTrackingParams": [
|
||||
{
|
||||
@@ -193,5 +192,4 @@
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
4
tests/testdata/compatible/textmessage.json
vendored
@@ -1,5 +1,4 @@
|
||||
{
|
||||
"response": {
|
||||
"responseContext": {
|
||||
"serviceTrackingParams": [
|
||||
{
|
||||
@@ -154,7 +153,6 @@
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
"endpoint": {
|
||||
"commandMetadata": {
|
||||
@@ -168,7 +166,7 @@
|
||||
},
|
||||
"csn": "n96GXabRGouFlQTigY2YDg",
|
||||
"xsrf_token": "QUFFLUhqbHNNWTF3NFJqc2h3cGE1NE9FWGdaWk5mRlVhUXxBQ3Jtc0tuTWhZNFcyTW1iZnA3ZnFTYUFudVFEUVE0cnFEOVBGcEU1MEh0Zlh4bll1amVmRl9OMkxZV3pKV1ZSbExBeDctTl95NGtBVnJZdlNxeS1KdWVNempEN2N6MHhaU1laV3hnVkZPeHp1OHVDTGVFSGUyOGduT0szbDV5N05LYUZTdzdoTDRwV1VJWndaVjdQVGRjNWVpR0YwUXgtZXc=",
|
||||
"url": "\/live_chat\/get_live_chat?continuation=0ofMyAPiARp8Q2c4S0RRb0xhelJMZDBsWFQwdERkalFhUTZxNXdiMEJQUW83YUhSMGNITTZMeTkzZDNjdWVXOTFkSFZpWlM1amIyMHZiR2wyWlY5amFHRjBQM1k5YXpSTGQwbFhUMHREZGpRbWFYTmZjRzl3YjNWMFBURWdBZyUzRCUzRCiPz5-Os-PkAjAAOABAAUorCAAQABgAIAAqDnN0YXRpY2NoZWNrc3VtOgBAAEoCCAFQgJqXjrPj5AJYA1CRwciOs-PkAli3pNq1k-PkAmgBggEECAEQAIgBAKABjbfnjrPj5AI%253D",
|
||||
"url": "/live_chat/get_live_chat?continuation=0ofMyAPiARp8Q2c4S0RRb0xhelJMZDBsWFQwdERkalFhUTZxNXdiMEJQUW83YUhSMGNITTZMeTkzZDNjdWVXOTFkSFZpWlM1amIyMHZiR2wyWlY5amFHRjBQM1k5YXpSTGQwbFhUMHREZGpRbWFYTmZjRzl3YjNWMFBURWdBZyUzRCUzRCiPz5-Os-PkAjAAOABAAUorCAAQABgAIAAqDnN0YXRpY2NoZWNrc3VtOgBAAEoCCAFQgJqXjrPj5AJYA1CRwciOs-PkAli3pNq1k-PkAmgBggEECAEQAIgBAKABjbfnjrPj5AI%253D",
"timing": {
"info": {
"st": 81
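In the compatible/* fixtures above, the visible url lines differ only in how the continuation URL is encoded: the old lines escape the solidus ("\/live_chat\/...") while the new lines write it plainly ("/live_chat/..."). Both forms decode to the same string, so the decoded payload is unchanged. A quick stdlib check (illustrative only, not part of the test suite):

import json

# "\/" and "/" are equivalent JSON encodings of the same character,
# so the re-encoded fixture URLs decode to identical strings.
assert json.loads('"\\/live_chat\\/get_live_chat"') == json.loads('"/live_chat/get_live_chat"')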
98 tests/testdata/default/newSponsor_current.json vendored Normal file
@@ -0,0 +1,98 @@
{
"responseContext": {
"webResponseContextExtensionData": ""
},
"continuationContents": {
"liveChatContinuation": {
"continuations": [
{
"invalidationContinuationData": {
"invalidationId": {
"objectSource": 1000,
"objectId": "___objectId___",
"topic": "chat~00000000000~0000000",
"subscribeToGcmTopics": true,
"protoCreationTimestampMs": "1577804400000"
},
"timeoutMs": 10000,
"continuation": "___continuation___"
}
}
],
"actions": [
{
"addChatItemAction": {
"item": {
"liveChatMembershipItemRenderer": {
"id": "dummy_id",
"timestampUsec": 1570678496000000,
"authorExternalChannelId": "author_channel_id",
"headerSubtext": {
"runs": [
{
"text": "新規メンバー"
}
]
},
"authorName": {
"simpleText": "author_name"
},
"authorPhoto": {
"thumbnails": [
{
"url": "https://yt3.ggpht.com/------------/AAAAAAAAAAA/AAAAAAAAAAA/xxxxxxxxxxxx/s32-x-x-xx-xx-xx-c0xffffff/photo.jpg",
"width": 32,
"height": 32
},
{
"url": "https://yt3.ggpht.com/------------/AAAAAAAAAAA/AAAAAAAAAAA/xxxxxxxxxxxx/s64-x-x-xx-xx-xx-c0xffffff/photo.jpg",
"width": 64,
"height": 64
}
]
},
"authorBadges": [
{
"liveChatAuthorBadgeRenderer": {
"customThumbnail": {
"thumbnails": [
{
"url": "https://yt3.ggpht.com/X=s32-c-k"
},
{
"url": "https://yt3.ggpht.com/X=s64-c-k"
}
]
},
"tooltip": "新規メンバー",
"accessibility": {
"accessibilityData": {
"label": "新規メンバー"
}
}
}
}
],
"contextMenuEndpoint": {
"commandMetadata": {
"webCommandMetadata": {
"ignoreNavigation": true
}
},
"liveChatItemContextMenuEndpoint": {
"params": "___params___"
}
},
"contextMenuAccessibility": {
"accessibilityData": {
"label": "コメントの操作"
}
}
}
}
}
}
]
}
}
}
80 tests/testdata/default/newSponsor_lagacy.json vendored Normal file
@@ -0,0 +1,80 @@
{
"responseContext": {
"webResponseContextExtensionData": ""
},
"continuationContents": {
"liveChatContinuation": {
"continuations": [
{
"invalidationContinuationData": {
"invalidationId": {
"objectSource": 1000,
"objectId": "___objectId___",
"topic": "chat~00000000000~0000000",
"subscribeToGcmTopics": true,
"protoCreationTimestampMs": "1577804400000"
},
"timeoutMs": 10000,
"continuation": "___continuation___"
}
}
],
"actions": [
{
"addChatItemAction": {
"item": {
"liveChatLegacyPaidMessageRenderer": {
"id": "dummy_id",
"timestampUsec": 1570678496000000,
"eventText": {
"runs": [
{
"text": "新規メンバー"
}
]
},
"detailText": {
"simpleText": "ようこそ、author_name!"
},
"authorName": {
"simpleText": "author_name"
},
"authorPhoto": {
"thumbnails": [
{
"url": "https://yt3.ggpht.com/------------/AAAAAAAAAAA/AAAAAAAAAAA/xxxxxxxxxxxx/s32-x-x-xx-xx-xx-c0xffffff/photo.jpg",
"width": 32,
"height": 32
},
{
"url": "https://yt3.ggpht.com/------------/AAAAAAAAAAA/AAAAAAAAAAA/xxxxxxxxxxxx/s64-x-x-xx-xx-xx-c0xffffff/photo.jpg",
"width": 64,
"height": 64
}
]
},
"authorExternalChannelId": "author_channel_id",
"contextMenuEndpoint": {
"clickTrackingParams": "___clickTrackingParams___",
"commandMetadata": {
"webCommandMetadata": {
"ignoreNavigation": true
}
},
"liveChatItemContextMenuEndpoint": {
"params": "___params___"
}
},
"contextMenuAccessibility": {
"accessibilityData": {
"label": "コメントの操作"
}
}
}
}
}
}
]
}
}
}
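The two newSponsor fixtures above capture the same "new member" event in the two payload shapes: newSponsor_current.json uses liveChatMembershipItemRenderer, while newSponsor_lagacy.json uses the older liveChatLegacyPaidMessageRenderer. A consumer of these fixtures only has to walk continuationContents.liveChatContinuation.actions[*].addChatItemAction.item and branch on the renderer key. The sketch below is a minimal illustration using only the standard library; the helper name first_renderer is hypothetical, not the project's actual parser:

import json
from pathlib import Path

def first_renderer(fixture_path):
    # Return (renderer_name, renderer_dict) for the first addChatItemAction in a fixture.
    data = json.loads(Path(fixture_path).read_text(encoding="utf-8"))
    actions = data["continuationContents"]["liveChatContinuation"]["actions"]
    item = actions[0]["addChatItemAction"]["item"]
    return next(iter(item.items()))

# "liveChatMembershipItemRenderer" for the current fixture,
# "liveChatLegacyPaidMessageRenderer" for the legacy one.
name, renderer = first_renderer("tests/testdata/default/newSponsor_current.json")
print(name, renderer["authorName"]["simpleText"])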
110 tests/testdata/default/replay_member_text.json vendored Normal file
@@ -0,0 +1,110 @@
{
"responseContext": {
"webResponseContextExtensionData": "data"
},
"continuationContents": {
"liveChatContinuation": {
"continuations": [
{
"liveChatReplayContinuationData": {
"invalidationId": {
"objectSource": 1000,
"objectId": "___objectId___",
"topic": "chat~00000000000~0000000",
"subscribeToGcmTopics": true,
"protoCreationTimestampMs": "1577804400000"
},
"timeoutMs": 10000,
"continuation": "___continuation___"
}
}
],
"actions": [
{
"replayChatItemAction": {
"actions": [
{
"addChatItemAction": {
"item": {
"liveChatTextMessageRenderer": {
"message": {
"runs": [
{
"text": "dummy_message"
}
]
},
"authorName": {
"simpleText": "author_name"
},
"authorPhoto": {
"thumbnails": [
{
"url": "https://yt3.ggpht.com/------------/AAAAAAAAAAA/AAAAAAAAAAA/xxxxxxxxxxxx/s32-x-x-xx-xx-xx-c0xffffff/photo.jpg",
"width": 32,
"height": 32
},
{
"url": "https://yt3.ggpht.com/------------/AAAAAAAAAAA/AAAAAAAAAAA/xxxxxxxxxxxx/s64-x-x-xx-xx-xx-c0xffffff/photo.jpg",
"width": 64,
"height": 64
}
]
},
"contextMenuEndpoint": {
"clickTrackingParams": "___clickTrackingParams___",
"commandMetadata": {
"webCommandMetadata": {
"ignoreNavigation": true
}
},
"liveChatItemContextMenuEndpoint": {
"params": "___params___"
}
},
"id": "dummy_id",
"timestampUsec": 1570678496000000,
"authorBadges": [
{
"liveChatAuthorBadgeRenderer": {
"customThumbnail": {
"thumbnails": [
{
"url": "https://yt3.ggpht.com/X=s16-c-k"
},
{
"url": "https://yt3.ggpht.com/X=s32-c-k"
}
]
},
"tooltip": "メンバー(1 か月)",
"accessibility": {
"accessibilityData": {
"label": "メンバー(1 か月)"
}
}
}
}
],
"authorExternalChannelId": "author_channel_id",
"contextMenuAccessibility": {
"accessibilityData": {
"label": "コメントの操作"
}
},
"timestampText": {
"simpleText": "1:23:45"
}
}
},
"clientId": "dummy_client_id"
}
}
],
"videoOffsetTimeMsec": "5025120"
}
}
]
}
}
}
182 tests/testdata/default/superchat.json vendored Normal file
@@ -0,0 +1,182 @@
{
"responseContext": {
"webResponseContextExtensionData": ""
},
"continuationContents": {
"liveChatContinuation": {
"continuations": [
{
"invalidationContinuationData": {
"invalidationId": {
"objectSource": 1000,
"objectId": "___objectId___",
"topic": "chat~00000000000~0000000",
"subscribeToGcmTopics": true,
"protoCreationTimestampMs": "1577804400000"
},
"timeoutMs": 10000,
"continuation": "___continuation___"
}
}
],
"actions": [
{
"addChatItemAction": {
"item": {
"liveChatPaidMessageRenderer": {
"id": "dummy_id",
"timestampUsec": 1570678496000000,
"authorName": {
"simpleText": "author_name"
},
"authorPhoto": {
"thumbnails": [
{
"url": "https://yt3.ggpht.com/------------/AAAAAAAAAAA/AAAAAAAAAAA/xxxxxxxxxxxx/s32-x-x-xx-xx-xx-c0xffffff/photo.jpg",
"width": 32,
"height": 32
},
{
"url": "https://yt3.ggpht.com/------------/AAAAAAAAAAA/AAAAAAAAAAA/xxxxxxxxxxxx/s64-x-x-xx-xx-xx-c0xffffff/photo.jpg",
"width": 64,
"height": 64
}
]
},
"purchaseAmountText": {
"simpleText": "¥800"
},
"message": {
"runs": [
{
"text": "dummy_message"
}
]
},
"headerBackgroundColor": 4278239141,
"headerTextColor": 4278190080,
"bodyBackgroundColor": 4280150454,
"bodyTextColor": 4278190080,
"authorExternalChannelId": "author_channel_id",
"authorNameTextColor": 2315255808,
"contextMenuEndpoint": {
"commandMetadata": {
"webCommandMetadata": {
"ignoreNavigation": true
}
},
"liveChatItemContextMenuEndpoint": {
"params": "___params___"
}
},
"timestampColor": 2147483648,
"contextMenuAccessibility": {
"accessibilityData": {
"label": "コメントの操作"
}
}
}
}
}
},
{
"addLiveChatTickerItemAction": {
"item": {
"liveChatTickerPaidMessageItemRenderer": {
"id": "dummy_id",
"amount": {
"simpleText": "¥846"
},
"amountTextColor": 4278190080,
"startBackgroundColor": 4280150454,
"endBackgroundColor": 4278239141,
"authorPhoto": {
"thumbnails": [
{
"url": "https://yt3.ggpht.com/------------/AAAAAAAAAAA/AAAAAAAAAAA/xxxxxxxxxxxx/s32-x-x-xx-xx-xx-c0xffffff/photo.jpg",
"width": 32,
"height": 32
},
{
"url": "https://yt3.ggpht.com/------------/AAAAAAAAAAA/AAAAAAAAAAA/xxxxxxxxxxxx/s32-x-x-xx-xx-xx-c0xffffff/photo.jpg",
"width": 64,
"height": 64
}
]
},
"durationSec": 120,
"showItemEndpoint": {
"commandMetadata": {
"webCommandMetadata": {
"ignoreNavigation": true
}
},
"showLiveChatItemEndpoint": {
"renderer": {
"liveChatPaidMessageRenderer": {
"id": "dummy_id",
"timestampUsec": 1570678496000000,
"authorName": {
"simpleText": "author_name"
},
"authorPhoto": {
"thumbnails": [
{
"url": "https://yt3.ggpht.com/------------/AAAAAAAAAAA/AAAAAAAAAAA/xxxxxxxxxxxx/s32-x-x-xx-xx-xx-c0xffffff/photo.jpg",
"width": 32,
"height": 32
},
{
"url": "https://yt3.ggpht.com/------------/AAAAAAAAAAA/AAAAAAAAAAA/xxxxxxxxxxxx/s64-x-x-xx-xx-xx-c0xffffff/photo.jpg",
"width": 64,
"height": 64
}
]
},
"purchaseAmountText": {
"simpleText": "¥846"
},
"message": {
"runs": [
{
"text": "dummy_message"
}
]
},
"headerBackgroundColor": 4278239141,
"headerTextColor": 4278190080,
"bodyBackgroundColor": 4280150454,
"bodyTextColor": 4278190080,
"authorExternalChannelId": "author_channel_id",
"authorNameTextColor": 2315255808,
"contextMenuEndpoint": {
"commandMetadata": {
"webCommandMetadata": {
"ignoreNavigation": true
}
},
"liveChatItemContextMenuEndpoint": {
"params": "___params___"
}
},
"timestampColor": 2147483648,
"contextMenuAccessibility": {
"accessibilityData": {
"label": "コメントの操作"
}
}
}
}
}
},
"authorExternalChannelId": "http://www.youtube.com/channel/author_channel_url",
"fullDurationSec": 120
}
},
"durationSec": "120"
}
}
]
}
}
}
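The color fields in the superchat fixture above (headerBackgroundColor, bodyBackgroundColor, timestampColor, and so on) appear to be unsigned 32-bit ARGB integers: 4280150454 is 0xFF1DE9B6 (fully opaque #1DE9B6) and 2147483648 is 0x80000000 (half-transparent black). A minimal decoding sketch, assuming that ARGB layout (not part of the library):

def argb_to_rgba_hex(value: int) -> str:
    # Split a 32-bit ARGB integer into channels and format as #RRGGBBAA.
    a = (value >> 24) & 0xFF
    r = (value >> 16) & 0xFF
    g = (value >> 8) & 0xFF
    b = value & 0xFF
    return "#{:02X}{:02X}{:02X}{:02X}".format(r, g, b, a)

print(argb_to_rgba_hex(4280150454))  # bodyBackgroundColor -> #1DE9B6FF
print(argb_to_rgba_hex(4278239141))  # headerBackgroundColor -> #00BFA5FF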
97 tests/testdata/default/supersticker.json vendored Normal file
@@ -0,0 +1,97 @@
{
"responseContext": {
"webResponseContextExtensionData": ""
},
"continuationContents": {
"liveChatContinuation": {
"continuations": [
{
"invalidationContinuationData": {
"invalidationId": {
"objectSource": 1000,
"objectId": "___objectId___",
"topic": "chat~00000000000~0000000",
"subscribeToGcmTopics": true,
"protoCreationTimestampMs": "1577804400000"
},
"timeoutMs": 10000,
"continuation": "___continuation___"
}
}
],
"actions": [
{
"addChatItemAction": {
"item": {
"liveChatPaidStickerRenderer": {
"id": "dummy_id",
"contextMenuEndpoint": {
"commandMetadata": {
"webCommandMetadata": {
"ignoreNavigation": true
}
},
"liveChatItemContextMenuEndpoint": {
"params": "___params___"
}
},
"contextMenuAccessibility": {
"accessibilityData": {
"label": "コメントの操作"
}
},
"timestampUsec": 1570678496000000,
"authorPhoto": {
"thumbnails": [
{
"url": "https://yt3.ggpht.com/------------/AAAAAAAAAAA/AAAAAAAAAAA/xxxxxxxxxxxx/s32-x-x-xx-xx-xx-c0xffffff/photo.jpg",
"width": 32,
"height": 32
},
{
"url": "https://yt3.ggpht.com/------------/AAAAAAAAAAA/AAAAAAAAAAA/xxxxxxxxxxxx/s64-x-x-xx-xx-xx-c0xffffff/photo.jpg",
"width": 64,
"height": 64
}
]
},
"authorName": {
"simpleText": "author_name"
},
"authorExternalChannelId": "author_channel_id",
"sticker": {
"thumbnails": [
{
"url": "//lh3.googleusercontent.com/param_s=s72-rp",
"width": 72,
"height": 72
},
{
"url": "//lh3.googleusercontent.com/param_s=s144-rp",
"width": 144,
"height": 144
}
],
"accessibility": {
"accessibilityData": {
"label": "___sticker_label___"
}
}
},
"moneyChipBackgroundColor": 4278248959,
"moneyChipTextColor": 4278190080,
"purchaseAmountText": {
"simpleText": "¥200"
},
"stickerDisplayWidth": 72,
"stickerDisplayHeight": 72,
"backgroundColor": 4278237396,
"authorNameTextColor": 3003121664
}
}
}
}
]
}
}
}
Some files were not shown because too many files have changed in this diff.