Compare commits: throw-erro ... master
576 Commits
Author | SHA1 | Date |
---|---|---|
Émilien (perso) | 93559cbdd5 | 1 year ago |
Ikko Eltociear Ashimine | 495ccdc221 | 1 year ago |
Samantaz Fox | e6f5fcbc4b | 1 year ago |
Samantaz Fox | df6b51f9c6 | 1 year ago |
lamemakes | 7a569d81ca | 1 year ago |
Émilien (perso) | 875b8ea0c2 | 1 year ago |
Emilien Devos | 8e4833d21a | 1 year ago |
Samantaz Fox | c3a3f98014 | 1 year ago |
Samantaz Fox | b06c87ff8d | 1 year ago |
Samantaz Fox | 69f23d95b8 | 1 year ago |
Samantaz Fox | 3444117818 | 1 year ago |
Samantaz Fox | 39ff94362e | 1 year ago |
Samantaz Fox | 11ab6ffb32 | 1 year ago |
Samantaz Fox | 9dd4195dd0 | 1 year ago |
Samantaz Fox | fcbd5106c3 | 1 year ago |
Samantaz Fox | 836898754e | 1 year ago |
Samantaz Fox | d3b04ac68c | 1 year ago |
Samantaz Fox | b2b61ab0a9 | 1 year ago |
Samantaz Fox | 62bd895562 | 1 year ago |
Samantaz Fox | 8d2ab70cbc | 1 year ago |
Samantaz Fox | 3024424ea2 | 1 year ago |
Samantaz Fox | 5af87f97a3 | 1 year ago |
Samantaz Fox | 96238d719d | 1 year ago |
Daniele Tricoli | 52c317f235 | 1 year ago |
maboroshin | f954483eac | 1 year ago |
Translator | a4ca460651 | 1 year ago |
maboroshin | 37bab74085 | 1 year ago |
Nicolas Dommanget-Muller | 50d6a2afb9 | 1 year ago |
Translator | daccbc2abb | 1 year ago |
04f7rx0n6 | d250b4132b | 1 year ago |
joaooliva | 3690631cdd | 1 year ago |
xrfmkrh | 3b6474d72b | 1 year ago |
maboroshin | fd3e2aa868 | 1 year ago |
gallegonovato | 14a5751a47 | 1 year ago |
Andrey | fda8d2d4d3 | 1 year ago |
Samantaz Fox | 46ea22f75c | 1 year ago |
Samantaz Fox | 68c26563fc | 1 year ago |
TheFrenchGhosty | 9cec83c1ff | 1 year ago |
IceTheDev2 | 281c8ecbf5 | 1 year ago |
Samantaz Fox | 1b942f4f0a | 1 year ago |
Samantaz Fox | e7bed765fe | 1 year ago |
Samantaz Fox | 7556cb69f2 | 1 year ago |
Samantaz Fox | b5e30d66d4 | 1 year ago |
Samantaz Fox | d9521c82cf | 1 year ago |
Samantaz Fox | 505a1566d1 | 1 year ago |
Émilien Devos (perso) | 19192b8be1 | 1 year ago |
Samantaz Fox | 867d488931 | 1 year ago |
Chunky programmer | 45cc835694 | 1 year ago |
Chunky programmer | 233bd3f593 | 1 year ago |
Émilien Devos (perso) | 545a5937d8 | 1 year ago |
Émilien Devos (perso) | 35694cc7e3 | 1 year ago |
Emilien Devos | 372192eabc | 1 year ago |
Émilien Devos (perso) | bc06c2fc27 | 1 year ago |
Émilien Devos (perso) | 7ea6ec1f52 | 1 year ago |
Emilien Devos | 042ad1f266 | 1 year ago |
Samantaz Fox | bbf16279bb | 1 year ago |
Samantaz Fox | 16ac3be85b | 1 year ago |
Samantaz Fox | 21f0b90354 | 1 year ago |
Samantaz Fox | 928ea75dbc | 1 year ago |
Samantaz Fox | 4414c9df70 | 1 year ago |
Samantaz Fox | 898066407d | 1 year ago |
Samantaz Fox | 381a0e326d | 1 year ago |
Samantaz Fox | 193c510c65 | 1 year ago |
Samantaz Fox | f0c8477905 | 1 year ago |
Samantaz Fox | 4379a3d873 | 1 year ago |
Samantaz Fox | df85265453 | 1 year ago |
Samantaz Fox | de78848039 | 1 year ago |
Samantaz Fox | e10f6b6626 | 1 year ago |
Samantaz Fox | 634e913da9 | 1 year ago |
Samantaz Fox | 1b25737b01 | 1 year ago |
Samantaz Fox | 8dd1824869 | 1 year ago |
Samantaz Fox | c7876d564f | 1 year ago |
Samantaz Fox | 5d176ad6de | 1 year ago |
Samantaz Fox | 4b29f8254a | 1 year ago |
Fjuro | c9eafb250f | 1 year ago |
Milo Ivir | fe97b3d761 | 1 year ago |
maboroshin | ed2d16c91d | 1 year ago |
Jeff Huang | a727bb037f | 1 year ago |
Oğuz Ersen | f0f6cb0d83 | 1 year ago |
Eric | e8df08e41e | 1 year ago |
Ihor Hordiichuk | fd06656d86 | 1 year ago |
Jorge Maldonado Ventura | ea6db9c58a | 1 year ago |
Jorge Maldonado Ventura | 184bd3204f | 1 year ago |
atilluF | f0120bece1 | 1 year ago |
Rex_sa | 7e3c685cd6 | 1 year ago |
Matthaiks | 67a79faaeb | 1 year ago |
Ashirg-ch | 11d45adcdc | 1 year ago |
joaooliva | f2cc97b290 | 1 year ago |
Alex | e656714542 | 1 year ago |
maboroshin | a79b7ef170 | 1 year ago |
gallegonovato | ef4ff4e4b2 | 1 year ago |
Samantaz Fox | 9c0c39baed | 1 year ago |
Samantaz Fox | 6440ae0b5c | 2 years ago |
Samantaz Fox | e238c08be5 | 2 years ago |
Samantaz Fox | 8d434ac06a | 2 years ago |
Samantaz Fox | 1333e6db26 | 2 years ago |
ChunkyProgrammer | 8bd2e60abc | 2 years ago |
chunky programmer | c713c32ceb | 2 years ago |
chunky programmer | 12b4dd9191 | 2 years ago |
chunky programmer | b2a0e6f1ff | 2 years ago |
chunky programmer | d728599251 | 2 years ago |
ChunkyProgrammer | d6fb5c03b7 | 2 years ago |
Samantaz Fox | 3a54e9556b | 2 years ago |
Samantaz Fox | 6755e31b72 | 2 years ago |
Samantaz Fox | 10fee9da61 | 2 years ago |
Samantaz Fox | b420de6977 | 2 years ago |
Samantaz Fox | febd14f703 | 2 years ago |
Samantaz Fox | 92f6a4d546 | 2 years ago |
Samantaz Fox | 544fc9f92e | 2 years ago |
Samantaz Fox | c385a944e6 | 2 years ago |
Samantaz Fox | ce1fb8d08c | 2 years ago |
gallegonovato | 56ebb477ca | 2 years ago |
xrfmkrh | cca8bcf2a8 | 2 years ago |
Fjuro | f3d9db10a2 | 2 years ago |
Émilien Devos (perso) | 46a9ce811a | 2 years ago |
Émilien Devos (perso) | 36f7c99cfb | 2 years ago |
Samantaz Fox | 720789b622 | 2 years ago |
Samantaz Fox | ce2649420f | 2 years ago |
Samantaz Fox | 7aac401407 | 2 years ago |
ChunkyProgrammer | 2d5145614b | 2 years ago |
Samantaz Fox | 1eb1bae370 | 2 years ago |
Samantaz Fox | 5017176e39 | 2 years ago |
Samantaz Fox | efda154ec8 | 2 years ago |
Samantaz Fox | c1fb320094 | 2 years ago |
Samantaz Fox | 90914343ec | 2 years ago |
Samantaz Fox | 384a8e200c | 2 years ago |
ChunkyProgrammer | 28584f22c5 | 2 years ago |
chunky programmer | 1b10446e5e | 2 years ago |
chunky programmer | d420741cc1 | 2 years ago |
chunky programmer | f298e225a1 | 2 years ago |
Samantaz Fox | 97e3938f5f | 2 years ago |
Samantaz Fox | deed4d10f2 | 2 years ago |
Артём Котлубай | 70a79f343d | 2 years ago |
Артём Котлубай | e6471feadc | 2 years ago |
John Donne | 49e04192c0 | 2 years ago |
Nicolas Dommanget-Muller | 1f12323ee6 | 2 years ago |
John Donne | 732fb7c499 | 2 years ago |
AHOHNMYC | 14053821ac | 2 years ago |
SC | 346f32855a | 2 years ago |
Ernestas | 7d48b96173 | 2 years ago |
atilluF | b9932b113b | 2 years ago |
Andrey | 72f83d4aa2 | 2 years ago |
Jeff Huang | 919997e41c | 2 years ago |
Damjan Gerl | 6667bdcd92 | 2 years ago |
victor dargallo | cb0e837a5e | 2 years ago |
Fjuro | e6ba3e3dab | 2 years ago |
Milo Ivir | f81bc96da0 | 2 years ago |
abyan akhtar | 4c541489dd | 2 years ago |
Jorge Maldonado Ventura | c60c14851b | 2 years ago |
Rex_sa | d857ee5a7c | 2 years ago |
Oğuz Ersen | 657486c19a | 2 years ago |
Eric | 9d52ddbf8d | 2 years ago |
Ihor Hordiichuk | d8337252a8 | 2 years ago |
gallegonovato | 66e671237f | 2 years ago |
victor dargallo | d5a516d76c | 2 years ago |
maboroshin | 231fb3481e | 2 years ago |
Damjan Gerl | 7b4e3639cf | 2 years ago |
victor dargallo | 778edf63cb | 2 years ago |
SC | fe1648e72e | 2 years ago |
Fjuro | 1825b8edb3 | 2 years ago |
Milo Ivir | a3e587657f | 2 years ago |
Parsa | 4078fc5818 | 2 years ago |
Jeff Huang | a9fcfcf7c9 | 2 years ago |
Oğuz Ersen | 4aa2c406ff | 2 years ago |
Eric | f46cc98654 | 2 years ago |
Ihor Hordiichuk | ec1d6ee851 | 2 years ago |
Jorge Maldonado Ventura | 9eafbbdcbb | 2 years ago |
Jorge Maldonado Ventura | 5c24bf1322 | 2 years ago |
gallegonovato | b97b5b5859 | 2 years ago |
Rex_sa | d139334376 | 2 years ago |
Matthaiks | 155f5fef97 | 2 years ago |
Ashirg-ch | 17ecdbaf7d | 2 years ago |
AHOHNMYC | 67859113fd | 2 years ago |
Samantaz Fox | eefc8bbbdd | 2 years ago |
Samantaz Fox | ff5e42d836 | 2 years ago |
Samantaz Fox | 3cfbc19ccc | 2 years ago |
Samantaz Fox | 7afa03d821 | 2 years ago |
Samantaz Fox | 0107b774f2 | 2 years ago |
Samantaz Fox | 9dfa268204 | 2 years ago |
Samantaz Fox | e24feab1f7 | 2 years ago |
Samantaz Fox | d1b51e57a2 | 2 years ago |
chunky programmer | 5517a4eadb | 2 years ago |
Samantaz Fox | 961cae2b9a | 2 years ago |
Samantaz Fox | adc605024f | 2 years ago |
Samantaz Fox | 9a765418d1 | 2 years ago |
Samantaz Fox | b3c0afef02 | 2 years ago |
Émilien Devos (perso) | 525e4bd67a | 2 years ago |
Gavin | c421f1f205 | 2 years ago |
thtmnisamnstr | fffdaa1410 | 2 years ago |
raphj | 600da635b7 | 2 years ago |
ChunkyProgrammer | e3c1cb3ec9 | 2 years ago |
ChunkyProgrammer | dc929be198 | 2 years ago |
ChunkyProgrammer | 1da00bade3 | 2 years ago |
Samantaz Fox | 8db2a93827 | 2 years ago |
Samantaz Fox | c0eab2b1f6 | 2 years ago |
Samantaz Fox | 9e82e6fc1b | 2 years ago |
Samantaz Fox | ef6eea3a65 | 2 years ago |
Samantaz Fox | d526094380 | 2 years ago |
Samantaz Fox | 562d75a47b | 2 years ago |
Emilien Devos | e0600f4553 | 2 years ago |
Jarek Baran | 0fe1b1ec19 | 2 years ago |
techmetx11 | 73d2ed6f77 | 2 years ago |
Lennart Bernhardt | f83f0d2561 | 2 years ago |
Lennart Bernhardt | 1d187bcf17 | 2 years ago |
ChunkyProgrammer | a3da03bee9 | 2 years ago |
ChunkyProgrammer | f840addd93 | 2 years ago |
techmetx11 | 7755ed4ac8 | 2 years ago |
techmetx11 | 49ddf8b6bd | 2 years ago |
ChunkyProgrammer | 5767344746 | 2 years ago |
ChunkyProgrammer | 3492485789 | 2 years ago |
Samantaz Fox | 8a44bd11d2 | 2 years ago |
Samantaz Fox | c0410602e7 | 2 years ago |
Samantaz Fox | 4ae158ef6d | 2 years ago |
Samantaz Fox | 1f3317e257 | 2 years ago |
victor dargallo | 08cbd44b57 | 2 years ago |
Oğuz Ersen | 224fbcd2b1 | 2 years ago |
victor dargallo | c188dec4fa | 2 years ago |
SC | 3aa6a0c4f0 | 2 years ago |
Fjuro | ce1f61d185 | 2 years ago |
Milo Ivir | c1e45cb84a | 2 years ago |
HamidReza Shareghzade | defec2e8fb | 2 years ago |
maboroshin | ded28b80d3 | 2 years ago |
Jeff Huang | dd6c9dbc65 | 2 years ago |
Eric | 46a7be89a7 | 2 years ago |
Ihor Hordiichuk | 72656e802e | 2 years ago |
gallegonovato | 60e3f8aec0 | 2 years ago |
Rex_sa | aad166c96a | 2 years ago |
Matthaiks | a0bdcc2964 | 2 years ago |
Mateusz Bączek | e1a25a184a | 2 years ago |
Samantaz Fox | 26ea676b8d | 2 years ago |
Samantaz Fox | b66a5c40a9 | 2 years ago |
Émilien Devos (perso) | a6d21cb211 | 2 years ago |
Stéphane | 712aea0831 | 2 years ago |
Samantaz Fox | 6837e42928 | 2 years ago |
ChunkyProgrammer | ffcc837c2a | 2 years ago |
Samantaz Fox | b4806e7ba9 | 2 years ago |
Samantaz Fox | 5c633ad1da | 2 years ago |
Brahim Hadriche | 3848c3f53f | 2 years ago |
thtmnisamnstr | 3341929060 | 2 years ago |
ChunkyProgrammer | a781cf3734 | 2 years ago |
Samantaz Fox | d79d6f38b2 | 2 years ago |
Samantaz Fox | f012d70e47 | 2 years ago |
Samantaz Fox | 01e00a588b | 2 years ago |
Samantaz Fox | 35ac26bd61 | 2 years ago |
ChunkyProgrammer | e3081ef1a9 | 2 years ago |
Brahim Hadriche | 0b17f68eba | 2 years ago |
ChunkyProgrammer | 742c951bc9 | 2 years ago |
ChunkyProgrammer | d8e23d34b6 | 2 years ago |
maboroshin | 548a0f26ef | 2 years ago |
VisualPlugin | 9325fa79ae | 2 years ago |
Felipe Nogueira | 1f607273a8 | 2 years ago |
fresh | 3c3d9ebf84 | 2 years ago |
Émilien Devos (perso) | 6b01629c5d | 2 years ago |
Brahim Hadriche | 025e755542 | 2 years ago |
Paul Fauchon | a3ecd46b01 | 2 years ago |
Paul Fauchon | f6c6c9e5ec | 2 years ago |
Samantaz Fox | bff5c8d9a1 | 2 years ago |
amogusussy | 03542f2f5d | 2 years ago |
Brahim Hadriche | a5cc66e060 | 2 years ago |
Brahim Hadriche | 38f6d08be6 | 2 years ago |
Brahim Hadriche | 8c0efb3ea9 | 2 years ago |
ChunkyProgrammer | 60b7c8015c | 2 years ago |
Samantaz Fox | 406d74d0b6 | 2 years ago |
ChunkyProgrammer | 4a14713462 | 2 years ago |
Brahim Hadriche | 27bf4d02a1 | 2 years ago |
Besnik Bleta | 2974ed348c | 2 years ago |
Milo Ivir | fdf162e318 | 2 years ago |
maboroshin | 24ac873532 | 2 years ago |
Oğuz Ersen | 0efb56238f | 2 years ago |
gallegonovato | eb3af9d4f1 | 2 years ago |
Ashirg-ch | 23f1f8bde3 | 2 years ago |
Ashirg-ch | 3ddcfea8fa | 2 years ago |
techmetx11 | 4ac263f1df | 2 years ago |
techmetx11 | 8eca5b270e | 2 years ago |
thtmnisamnstr | b3eea6ab3e | 2 years ago |
Saurmandal | 7e0210d090 | 2 years ago |
ssantos | 596a16c085 | 2 years ago |
André Marcelo Alvarenga | 57e4312d9f | 2 years ago |
Émilien Devos (perso) | 0995e0447c | 2 years ago |
Brahim Hadriche | 6ee51f460a | 2 years ago |
Brahim Hadriche | 15e9510ab2 | 2 years ago |
Brahim Hadriche | 7b124eec64 | 2 years ago |
Brahim Hadriche | 20289a4d01 | 2 years ago |
Brahim Hadriche | 8445d3ae12 | 2 years ago |
Samantaz Fox | b287ff2126 | 2 years ago |
Andrey | 64780ce1da | 2 years ago |
Raman | 8046316f20 | 2 years ago |
Samantaz Fox | 4bbeb4a4c8 | 2 years ago |
ChunkyProgrammer | b5eb6016bb | 2 years ago |
Wes van der Vleuten | bde21d527f | 2 years ago |
Émilien Devos (perso) | b287ddc52a | 2 years ago |
ChunkyProgrammer | bc5d81fe60 | 2 years ago |
Samantaz Fox | cbbec00e1c | 2 years ago |
Samantaz Fox | ba217c9174 | 2 years ago |
Samantaz Fox | 217b740e01 | 2 years ago |
Émilien Devos (perso) | d6bf9e9bcf | 2 years ago |
Brahim Hadriche | a95f82e44b | 2 years ago |
ChunkyProgrammer | 76ad4e8026 | 2 years ago |
ChunkyProgrammer | d03a62641f | 2 years ago |
ChunkyProgrammer | 4731480821 | 2 years ago |
ChunkyProgrammer | aecbafbc7b | 2 years ago |
ChunkyProgrammer | 8384fa94c2 | 2 years ago |
Samantaz Fox | 7993784701 | 2 years ago |
AHOHNMYC | 9c400fd455 | 2 years ago |
SC | e4d14481c5 | 2 years ago |
Marsel J. Jonker | cb7c4a8220 | 2 years ago |
Damjan Gerl | c5d1344511 | 2 years ago |
Besnik Bleta | 299eb9207b | 2 years ago |
Fjuro | f2390ed052 | 2 years ago |
Milo Ivir | 256b518469 | 2 years ago |
maboroshin | 58688a6311 | 2 years ago |
Jeff Huang | fc5092c399 | 2 years ago |
Oğuz Ersen | 591f816781 | 2 years ago |
Eric | db6d3d2191 | 2 years ago |
Ihor Hordiichuk | 054686e557 | 2 years ago |
Jorge Maldonado Ventura | c1c6f67ad3 | 2 years ago |
Jorge Maldonado Ventura | c82272155e | 2 years ago |
atilluF | 4ca23f2d51 | 2 years ago |
Rex_sa | 45c99190b2 | 2 years ago |
Matthaiks | 7ae9dabe3c | 2 years ago |
Damjan Gerl | 5534cd87f8 | 2 years ago |
Goudarz Jafari | eb7588f1a0 | 2 years ago |
Mateus | 20dc0a9e26 | 2 years ago |
maboroshin | f4de962dc2 | 2 years ago |
eightyy8 | b2f93dc89c | 2 years ago |
SC | bd00b4c730 | 2 years ago |
AHOHNMYC | 4830656484 | 2 years ago |
Samantaz Fox | f9c2412010 | 2 years ago |
Samantaz Fox | 87342e4efd | 2 years ago |
ChunkyProgrammer | 838cbeffcc | 2 years ago |
Samantaz Fox | 27ff8d7c33 | 2 years ago |
Samantaz Fox | feeb872791 | 2 years ago |
thtmnisamnstr | 6f01d6eacf | 2 years ago |
ChunkyProgrammer | e0c70d34cc | 2 years ago |
Brahim Hadriche | d57d278f32 | 2 years ago |
ChunkyProgrammer | b893bdac0d | 2 years ago |
ChunkyProgrammer | 97825be10c | 2 years ago |
ChunkyProgrammer | 28424d0e88 | 2 years ago |
Brahim Hadriche | c37d8e3664 | 2 years ago |
Brahim Hadriche | 47a5b98e25 | 2 years ago |
Brahim Hadriche | 2606decd21 | 2 years ago |
Brahim Hadriche | b2589c74be | 2 years ago |
ChunkyProgrammer | c162c7ff3f | 2 years ago |
Samantaz Fox | d6dd341594 | 2 years ago |
Samantaz Fox | c7f34042a2 | 2 years ago |
Samantaz Fox | 6c687a3cac | 2 years ago |
ChunkyProgrammer | bf5175d1e9 | 2 years ago |
Brahim Hadriche | e7a9aeff95 | 2 years ago |
ChunkyProgrammer | 785fe52674 | 2 years ago |
Gavin Johnson | 72d0c9e409 | 2 years ago |
Gavin Johnson | 5c7bda66ae | 2 years ago |
Gavin Johnson | 96344f28b4 | 2 years ago |
Samantaz Fox | 3b8e6c6040 | 2 years ago |
Macic | 13bf4e9e00 | 2 years ago |
Samantaz Fox | 2a803dc067 | 2 years ago |
ChunkyProgrammer | c2957dbce4 | 2 years ago |
Samantaz Fox | 9a9f8231e8 | 2 years ago |
Hosted Weblate | ad3c721af7 | 2 years ago |
Hosted Weblate | 9b9fde1054 | 2 years ago |
Hosted Weblate | e66e463156 | 2 years ago |
Hosted Weblate | 5c024c677b | 2 years ago |
Hosted Weblate | 68caf355af | 2 years ago |
Hosted Weblate | 32bc44e83b | 2 years ago |
Hosted Weblate | 8cc0f9faf0 | 2 years ago |
Hosted Weblate | 75d136ce77 | 2 years ago |
Hosted Weblate | dd1ffb9283 | 2 years ago |
Hosted Weblate | b3a605c574 | 2 years ago |
Hosted Weblate | f5b3cee263 | 2 years ago |
Hosted Weblate | 24f1d82919 | 2 years ago |
Samantaz Fox | dbee027ed9 | 2 years ago |
Samantaz Fox | 624425cfa8 | 2 years ago |
techmetx11 | caf9520c86 | 2 years ago |
Wes van der Vleuten | 420e12bb8b | 2 years ago |
Wes van der Vleuten | 7fd205179b | 2 years ago |
Wes van der Vleuten | 4aa696fa6e | 2 years ago |
Samantaz Fox | 4e3884cae7 | 2 years ago |
Samantaz Fox | ebc02d0be3 | 2 years ago |
Samantaz Fox | f47d4f88cc | 2 years ago |
Samantaz Fox | cf93c94fc4 | 2 years ago |
Émilien Devos | 030070f1eb | 2 years ago |
hippogriffin | 3509999892 | 2 years ago |
Brahim Hadriche | f6a4d04070 | 2 years ago |
Brahim Hadriche | 0e22a0c21a | 2 years ago |
Samantaz Fox | 0e68756758 | 2 years ago |
Samantaz Fox | a7b2df31f0 | 2 years ago |
Samantaz Fox | ce07f2cd4a | 2 years ago |
DUOLabs333 | ff66cec920 | 2 years ago |
DUO Labs | 67ace4fd9d | 2 years ago |
DUOLabs333 | 86333cd434 | 2 years ago |
Gavin Johnson | 855202e40e | 2 years ago |
DUOLabs333 | 8dcc98b3b9 | 2 years ago |
Brahim Hadriche | 910809f1eb | 2 years ago |
techmetx11 | fe5b81f2c3 | 2 years ago |
Samantaz Fox | ea0d1b6f7b | 2 years ago |
Émilien Devos | c8fecffbbe | 2 years ago |
Samantaz Fox | 215446e638 | 2 years ago |
Samantaz Fox | b779445836 | 2 years ago |
Hosted Weblate | c02ae66bb1 | 2 years ago |
Samantaz Fox | d1bf36bd2b | 2 years ago |
Samantaz Fox | aacf83c06e | 2 years ago |
Samantaz Fox | 1af846e58c | 2 years ago |
Émilien Devos | c012aac997 | 2 years ago |
Émilien Devos | d6087fac47 | 2 years ago |
Samantaz Fox | 4ee483282e | 2 years ago |
Samantaz Fox | 04b97ec261 | 2 years ago |
techmetx11 | 1b5fbfc13e | 2 years ago |
Brahim Hadriche | 01acb9bfbf | 2 years ago |
Brahim Hadriche | 1fb0a49592 | 2 years ago |
DUOLabs333 | 4b2d942024 | 2 years ago |
Samantaz Fox | 05258d56bd | 2 years ago |
marc | 692166bd64 | 2 years ago |
DUOLabs333 | 456e91426a | 2 years ago |
DUOLabs333 | 4fc1b8ae86 | 2 years ago |
DUOLabs333 | 32471382c4 | 2 years ago |
Samantaz Fox | 927c37ce3e | 2 years ago |
Samantaz Fox | a37522a03d | 2 years ago |
Brackets | ed8f02ef01 | 2 years ago |
DUO Labs | 8d08cfe30f | 2 years ago |
Samantaz Fox | 049bfab438 | 2 years ago |
Hosted Weblate | 7f0f40f811 | 2 years ago |
Hosted Weblate | 62b8f8ac80 | 2 years ago |
Hosted Weblate | 16140f8b3f | 2 years ago |
Hosted Weblate | e0275d0908 | 2 years ago |
Hosted Weblate | a57770eb1f | 2 years ago |
Hosted Weblate | 233de2eff9 | 2 years ago |
Hosted Weblate | 9c9d71d41a | 2 years ago |
Hosted Weblate | 6b2fff83b5 | 2 years ago |
Hosted Weblate | 23b229ebb7 | 2 years ago |
Hosted Weblate | 72aa5c94af | 2 years ago |
Hosted Weblate | 4d6ff3a3c6 | 2 years ago |
Hosted Weblate | e2864a5ba1 | 2 years ago |
Hosted Weblate | a36363198c | 2 years ago |
Hosted Weblate | e2ce9c2cee | 2 years ago |
Émilien Devos | 98301a2237 | 2 years ago |
DUOLabs333 | 0d3610f63d | 2 years ago |
DUOLabs333 | 85dd3533bb | 2 years ago |
DUOLabs333 | 76758baab8 | 2 years ago |
DUOLabs333 | 9d83e2da4e | 2 years ago |
DUOLabs333 | 45b8f6d0cd | 2 years ago |
DUOLabs333 | b49ed65a07 | 2 years ago |
DUOLabs333 | 8df1c3bb57 | 2 years ago |
confused_alex | 865704dc7b | 2 years ago |
shironeko | 1aaf290814 | 2 years ago |
brackets0 | 4659e27b56 | 2 years ago |
Samantaz Fox | f9eb839c7a | 2 years ago |
Samantaz Fox | 69b8e0919f | 2 years ago |
Samantaz Fox | 4e3a930626 | 2 years ago |
Samantaz Fox | b6a4de66a5 | 2 years ago |
Samantaz Fox | 40c666cab2 | 2 years ago |
Samantaz Fox | 6c9754e663 | 2 years ago |
Samantaz Fox | 5d6abd5301 | 2 years ago |
Samantaz Fox | 52ef89f02d | 2 years ago |
Samantaz Fox | 2903e896ec | 2 years ago |
Samantaz Fox | c5ee2bfc0f | 2 years ago |
Samantaz Fox | 8e8ca4fcc5 | 2 years ago |
Samantaz Fox | ce7db8d2cb | 2 years ago |
Samantaz Fox | bdc51cd20f | 2 years ago |
Samantaz Fox | 9588fcb5d1 | 2 years ago |
Samantaz Fox | fbcce57ce2 | 2 years ago |
Samantaz Fox | 99bf519781 | 2 years ago |
dev | 1f6c234259 | 2 years ago |
Samantaz Fox | 5160d8bae3 | 2 years ago |
PrivacyDevel | 4fc5d43374 | 2 years ago |
PrivacyDevel | 9656067296 | 2 years ago |
PrivacyDevel | 9eb2ad367e | 2 years ago |
Samantaz Fox | 09b9b758de | 2 years ago |
Samantaz Fox | a46404bf78 | 2 years ago |
Samantaz Fox | c142703453 | 2 years ago |
Samantaz Fox | f44506b7e0 | 2 years ago |
Samantaz Fox | afc0ec3c30 | 2 years ago |
Samantaz Fox | 1bb8f2815d | 2 years ago |
Samantaz Fox | 516efd2df3 | 2 years ago |
Samantaz Fox | 47cc26cb3c | 2 years ago |
Samantaz Fox | cc5c83333f | 2 years ago |
Wes van der Vleuten | d3d9cfdd0d | 2 years ago |
Wes van der Vleuten | c03f92baf7 | 2 years ago |
Wes van der Vleuten | 5bcb5f3175 | 2 years ago |
Wes van der Vleuten | c95ee10d69 | 2 years ago |
Wes van der Vleuten | f604c1c68b | 2 years ago |
Wes van der Vleuten | 7b57381773 | 2 years ago |
Wes van der Vleuten | 437f42250e | 2 years ago |
Samantaz Fox | 09942dee66 | 2 years ago |
Samantaz Fox | 9da1827e95 | 2 years ago |
Samantaz Fox | 758b7df400 | 2 years ago |
Samantaz Fox | 46a63e6150 | 2 years ago |
Samantaz Fox | f267394bbe | 2 years ago |
Samantaz Fox | 2acff70811 | 2 years ago |
Samantaz Fox | db91d3af66 | 2 years ago |
Samantaz Fox | 83795c245a | 2 years ago |
Samantaz Fox | d659a451d6 | 2 years ago |
Samantaz Fox | 87a5d70062 | 2 years ago |
Samantaz Fox | ae03ed7bf7 | 2 years ago |
Samantaz Fox | e23ceb6ae9 | 2 years ago |
Samantaz Fox | 33150f5de3 | 2 years ago |
Samantaz Fox | 7df0cfcbed | 2 years ago |
Samantaz Fox | 907ddfa06a | 2 years ago |
Samantaz Fox | 6aaea7fafa | 2 years ago |
Samantaz Fox | cd03fa06ae | 2 years ago |
Samantaz Fox | 9baaef412f | 2 years ago |
Samantaz Fox | 88141c459c | 2 years ago |
Samantaz Fox | 6250039405 | 2 years ago |
Samantaz Fox | 84cd4d6a5b | 2 years ago |
Samantaz Fox | a1c6159e6f | 2 years ago |
Samantaz Fox | b7555343a0 | 2 years ago |
Samantaz Fox | 4055c3bec8 | 2 years ago |
Samantaz Fox | c5303d55e5 | 2 years ago |
Samantaz Fox | 8096c2d81d | 2 years ago |
Samantaz Fox | 4e1f5c8357 | 2 years ago |
Hosted Weblate | bba693e2af | 2 years ago |
Hosted Weblate | 127bfd5023 | 2 years ago |
Hosted Weblate | 2edfe4a463 | 2 years ago |
Hosted Weblate | 4b1ef90d96 | 2 years ago |
Émilien Devos | 0c7919f3d9 | 2 years ago |
Samantaz Fox | 72cf49eda1 | 2 years ago |
Samantaz Fox | e2ab488e7f | 2 years ago |
thecashewtrader | 1e96206b0b | 2 years ago |
thecashewtrader | 6f301db11c | 2 years ago |
Samantaz Fox | 12db1be87b | 2 years ago |
Samantaz Fox | cdb370f56b | 2 years ago |
Hosted Weblate | ae4f67f39c | 2 years ago |
Hosted Weblate | fcd29a4143 | 2 years ago |
Hosted Weblate | fa544c158a | 2 years ago |
Hosted Weblate | 7f3509aa36 | 2 years ago |
thecashewtrader | a1e0a6b499 | 2 years ago |
thecashewtrader | 6ea3673cf0 | 2 years ago |
Samantaz Fox | 3b39b8c772 | 2 years ago |
thecashewtrader | ffb42a9b23 | 2 years ago |
Samantaz Fox | 6707368f19 | 2 years ago |
Hosted Weblate | 1e186257da | 2 years ago |
Hosted Weblate | d85fcc4e7c | 2 years ago |
Hosted Weblate | 3e13d83ced | 2 years ago |
Hosted Weblate | 14de6a5658 | 2 years ago |
Hosted Weblate | 6100d5f12d | 2 years ago |
Samantaz Fox | dcfa0687f4 | 2 years ago |
Samantaz Fox | a01433960d | 2 years ago |
Benjamin Loison | 18a7ebe3a5 | 2 years ago |
Samantaz Fox | 7069969198 | 2 years ago |
Jakub Filo | 7c45026383 | 2 years ago |
Samantaz Fox | cf12e9dec1 | 2 years ago |
Samantaz Fox | 221d472127 | 2 years ago |
Samantaz Fox | 376ed3f4d3 | 2 years ago |
Samantaz Fox | 7df176d750 | 2 years ago |
Samantaz Fox | 0fa3250f02 | 2 years ago |
Hosted Weblate | 3b439a8fb7 | 2 years ago |
Hosted Weblate | 3a56ed19fe | 2 years ago |
Hosted Weblate | 53662b8400 | 2 years ago |
Hosted Weblate | eac37f1bd4 | 2 years ago |
Hosted Weblate | 1ac5081090 | 2 years ago |
Hosted Weblate | e3de6a4138 | 2 years ago |
Hosted Weblate | fc96ecaa66 | 2 years ago |
Hosted Weblate | 5ca34f3eb5 | 2 years ago |
Hosted Weblate | dcabce50c0 | 2 years ago |
Hosted Weblate | 5b0a4a8db4 | 2 years ago |
Hosted Weblate | b5a2c67d16 | 2 years ago |
Hosted Weblate | f911871990 | 2 years ago |
Chris Helder | c3de622493 | 2 years ago |
Samantaz Fox | 53fb6ad039 | 2 years ago |
Samantaz Fox | 8ab339396a | 2 years ago |
Samantaz Fox | 5048a89b9b | 2 years ago |
Samantaz Fox | feb38f891b | 2 years ago |
Emilien Devos | c658fd27cc | 2 years ago |
Emilien Devos | 260bab598e | 2 years ago |
Emilien Devos | 6f3b4fbaaf | 2 years ago |
Samantaz Fox | 1e7d330350 | 2 years ago |
Émilien Devos | 31244cbcc8 | 2 years ago |
Andrei E | 508a5761a1 | 2 years ago |
Jakub Filo | 4818b89ab1 | 2 years ago |
Hosted Weblate | 4e44123abc | 2 years ago |
Hosted Weblate | 689365d713 | 2 years ago |
Émilien Devos | a7d9df5516 | 2 years ago |
Samantaz Fox | 16b23efb4f | 2 years ago |
Emilien Devos | ca4c2115ee | 2 years ago |
CalculationPaper | c847d6d370 | 2 years ago |
CalculationPaper | bbf66c9b72 | 2 years ago |
Émilien Devos | 7f2ec18372 | 2 years ago |
138138138 | de74056925 | 2 years ago |
@@ -0,0 +1,37 @@
name: Close duplicates
on:
  issues:
    types: [opened]
jobs:
  run:
    runs-on: ubuntu-latest
    permissions: write-all
    steps:
      - uses: iv-org/close-potential-duplicates@v1
        with:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          # Issue title filter work with anymatch https://www.npmjs.com/package/anymatch.
          # Any matched issue will stop detection immediately.
          # You can specify multi filters in each line.
          filter: ''
          # Exclude keywords in title before detecting.
          exclude: ''
          # Label to set, when potential duplicates are detected.
          label: duplicate
          # Get issues with state to compare. Supported state: 'all', 'closed', 'open'.
          state: open
          # If similarity is higher than this threshold([0,1]), issue will be marked as duplicate.
          threshold: 0.9
          # Reactions to be add to comment when potential duplicates are detected.
          # Available reactions: "-1", "+1", "confused", "laugh", "heart", "hooray", "rocket", "eyes"
          reactions: ''
          close: true
          # Comment to post when potential duplicates are detected.
          comment: |
            Hello, your issue is a duplicate of this/these issue(s): {{#issues}}
             - #{{ number }} [accuracy: {{ accuracy }}%]
            {{/issues}}

            If this is a mistake please explain why and ping @\unixfox, @\SamantazFox and @\TheFrenchGhosty.

            Please refrain from opening new issues, it won't help in solving your problem.
@@ -0,0 +1,24 @@
'use strict';
var save_player_pos_key = 'save_player_pos';

function get_all_video_times() {
    return helpers.storage.get(save_player_pos_key) || {};
}

document.querySelectorAll('.watched-indicator').forEach(function (indicator) {
    var watched_part = get_all_video_times()[indicator.dataset.id];
    var total = parseInt(indicator.dataset.length, 10);
    if (watched_part === undefined) {
        watched_part = total;
    }
    var percentage = Math.round((watched_part / total) * 100);

    if (percentage < 5) {
        percentage = 5;
    }
    if (percentage > 90) {
        percentage = 100;
    }

    indicator.style.width = percentage + '%';
});
@@ -1,6 +1,6 @@
 dependencies:
 - name: postgresql
   repository: https://charts.bitnami.com/bitnami/
-  version: 11.1.3
+  version: 12.1.9
-digest: sha256:79061645472b6fb342d45e8e5b3aacd018ef5067193e46a060bccdc99fe7f6e1
+digest: sha256:71ff342a6c0a98bece3d7fe199983afb2113f8db65a3e3819de875af2c45add7
-generated: "2022-03-02T05:57:20.081432389+13:00"
+generated: "2023-01-20T20:42:32.757707004Z"
@@ -0,0 +1 @@
{}

@@ -0,0 +1 @@
{}

@@ -1 +1 @@
-Subproject commit c401dd9203434b561022242c24b0c200d72284c0
+Subproject commit 11ec372f72747c09d48ffef04843f72be67d5b54
@@ -0,0 +1,46 @@
require "../spec_helper"

Spectator.describe "Utils" do
  describe "decode_date" do
    it "parses short dates (en-US)" do
      expect(decode_date("1s ago")).to be_close(Time.utc - 1.second, 500.milliseconds)
      expect(decode_date("2min ago")).to be_close(Time.utc - 2.minutes, 500.milliseconds)
      expect(decode_date("3h ago")).to be_close(Time.utc - 3.hours, 500.milliseconds)
      expect(decode_date("4d ago")).to be_close(Time.utc - 4.days, 500.milliseconds)
      expect(decode_date("5w ago")).to be_close(Time.utc - 5.weeks, 500.milliseconds)
      expect(decode_date("6mo ago")).to be_close(Time.utc - 6.months, 500.milliseconds)
      expect(decode_date("7y ago")).to be_close(Time.utc - 7.years, 500.milliseconds)
    end

    it "parses short dates (en-GB)" do
      expect(decode_date("55s ago")).to be_close(Time.utc - 55.seconds, 500.milliseconds)
      expect(decode_date("44min ago")).to be_close(Time.utc - 44.minutes, 500.milliseconds)
      expect(decode_date("22hr ago")).to be_close(Time.utc - 22.hours, 500.milliseconds)
      expect(decode_date("1day ago")).to be_close(Time.utc - 1.day, 500.milliseconds)
      expect(decode_date("2days ago")).to be_close(Time.utc - 2.days, 500.milliseconds)
      expect(decode_date("3wk ago")).to be_close(Time.utc - 3.weeks, 500.milliseconds)
      expect(decode_date("11mo ago")).to be_close(Time.utc - 11.months, 500.milliseconds)
      expect(decode_date("11yr ago")).to be_close(Time.utc - 11.years, 500.milliseconds)
    end

    it "parses long forms (singular)" do
      expect(decode_date("1 second ago")).to be_close(Time.utc - 1.second, 500.milliseconds)
      expect(decode_date("1 minute ago")).to be_close(Time.utc - 1.minute, 500.milliseconds)
      expect(decode_date("1 hour ago")).to be_close(Time.utc - 1.hour, 500.milliseconds)
      expect(decode_date("1 day ago")).to be_close(Time.utc - 1.day, 500.milliseconds)
      expect(decode_date("1 week ago")).to be_close(Time.utc - 1.week, 500.milliseconds)
      expect(decode_date("1 month ago")).to be_close(Time.utc - 1.month, 500.milliseconds)
      expect(decode_date("1 year ago")).to be_close(Time.utc - 1.year, 500.milliseconds)
    end

    it "parses long forms (plural)" do
      expect(decode_date("5 seconds ago")).to be_close(Time.utc - 5.seconds, 500.milliseconds)
      expect(decode_date("17 minutes ago")).to be_close(Time.utc - 17.minutes, 500.milliseconds)
      expect(decode_date("23 hours ago")).to be_close(Time.utc - 23.hours, 500.milliseconds)
      expect(decode_date("3 days ago")).to be_close(Time.utc - 3.days, 500.milliseconds)
      expect(decode_date("2 weeks ago")).to be_close(Time.utc - 2.weeks, 500.milliseconds)
      expect(decode_date("9 months ago")).to be_close(Time.utc - 9.months, 500.milliseconds)
      expect(decode_date("8 years ago")).to be_close(Time.utc - 8.years, 500.milliseconds)
    end
  end
end
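The spec above pins down what decode_date is expected to do: turn a relative date string such as "3h ago" or "2 weeks ago" into an absolute UTC timestamp. The snippet below is a minimal sketch written only against those expectations; it is not the parser that ships in the repository, and the name decode_date_sketch is made up for the example.

# Illustrative sketch only - not the repository's decode_date implementation.
def decode_date_sketch(text : String) : Time
  # Pull out the amount and the unit, e.g. "11mo ago" -> 11, "mo"
  match = text.match(/(\d+)\s*([a-z]+)/i)
  raise ArgumentError.new("Unrecognized relative date: #{text}") if match.nil?

  amount = match[1].to_i
  unit   = match[2].downcase
  now    = Time.utc

  case unit
  when .starts_with?("mo") then now - amount.months  # "mo", "month(s)"
  when .starts_with?("s")  then now - amount.seconds # "s", "second(s)"
  when .starts_with?("m")  then now - amount.minutes # "min", "minute(s)"
  when .starts_with?("h")  then now - amount.hours   # "h", "hr", "hour(s)"
  when .starts_with?("d")  then now - amount.days    # "d", "day(s)"
  when .starts_with?("w")  then now - amount.weeks   # "w", "wk", "week(s)"
  when .starts_with?("y")  then now - amount.years   # "y", "yr", "year(s)"
  else raise ArgumentError.new("Unknown time unit: #{unit}")
  end
end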
@@ -1,113 +1,111 @@
 require "../../parsers_helper.cr"

-Spectator.describe Invidious::Hashtag do
+Spectator.describe "parse_video_info" do
-  it "parses scheduled livestreams data (test 1)" do
+  it "parses scheduled livestreams data" do
     # Enable mock
-    _player = load_mock("video/scheduled_live_nintendo.player")
-    _next = load_mock("video/scheduled_live_nintendo.next")
+    _player = load_mock("video/scheduled_live_PBD-Podcast.player")
+    _next = load_mock("video/scheduled_live_PBD-Podcast.next")

     raw_data = _player.merge!(_next)
-    info = parse_video_info("QMGibBzTu0g", raw_data)
+    info = parse_video_info("N-yVic7BbY0", raw_data)

     # Some basic verifications
     expect(typeof(info)).to eq(Hash(String, JSON::Any))

-    expect(info["shortDescription"].as_s).to eq(
-      "Tune in on 6/22 at 7 a.m. PT for a livestreamed Xenoblade Chronicles 3 Direct presentation featuring roughly 20 minutes of information about the upcoming RPG adventure for Nintendo Switch."
-    )
-    expect(info["descriptionHtml"].as_s).to eq(
-      "Tune in on 6/22 at 7 a.m. PT for a livestreamed Xenoblade Chronicles 3 Direct presentation featuring roughly 20 minutes of information about the upcoming RPG adventure for Nintendo Switch."
-    )
+    expect(info["videoType"].as_s).to eq("Scheduled")

-    expect(info["likes"].as_i).to eq(2_283)
+    # Basic video infos

-    expect(info["genre"].as_s).to eq("Gaming")
-    expect(info["genreUrl"].raw).to be_nil
-    expect(info["genreUcid"].as_s).to be_empty
-    expect(info["license"].as_s).to be_empty
+    expect(info["title"].as_s).to eq("Home Team | PBD Podcast | Ep. 241")
+    expect(info["views"].as_i).to eq(6)
+    expect(info["likes"].as_i).to eq(7)
+    expect(info["lengthSeconds"].as_i).to eq(0_i64)
+    expect(info["published"].as_s).to eq("2023-02-28T14:00:00Z") # Unix 1677592800

-    expect(info["authorThumbnail"].as_s).to eq(
-      "https://yt3.ggpht.com/ytc/AKedOLTt4vtjREUUNdHlyu9c4gtJjG90M9jQheRlLKy44A=s48-c-k-c0x00ffffff-no-rj"
-    )
+    # Extra video infos

-    expect(info["authorVerified"].as_bool).to be_true
-    expect(info["subCountText"].as_s).to eq("8.5M")
+    expect(info["allowedRegions"].as_a).to_not be_empty
+    expect(info["allowedRegions"].as_a.size).to eq(249)

-    expect(info["relatedVideos"].as_a.size).to eq(20)
+    expect(info["allowedRegions"].as_a).to contain(
+      "AD", "AR", "BA", "BT", "CZ", "FO", "GL", "IO", "KE", "KH", "LS",
+      "LT", "MP", "NO", "PR", "RO", "SE", "SK", "SS", "SX", "SZ", "ZW"
+    )

-    # related video #1
-    expect(info["relatedVideos"][3]["id"].as_s).to eq("a-SN3lLIUEo")
-    expect(info["relatedVideos"][3]["author"].as_s).to eq("Nintendo")
-    expect(info["relatedVideos"][3]["ucid"].as_s).to eq("UCGIY_O-8vW4rfX98KlMkvRg")
-    expect(info["relatedVideos"][3]["view_count"].as_s).to eq("147796")
-    expect(info["relatedVideos"][3]["short_view_count"].as_s).to eq("147K")
-    expect(info["relatedVideos"][3]["author_verified"].as_s).to eq("true")
+    expect(info["keywords"].as_a).to_not be_empty
+    expect(info["keywords"].as_a.size).to eq(25)

-    # Related video #2
-    expect(info["relatedVideos"][16]["id"].as_s).to eq("l_uC1jFK0lo")
-    expect(info["relatedVideos"][16]["author"].as_s).to eq("Nintendo")
-    expect(info["relatedVideos"][16]["ucid"].as_s).to eq("UCGIY_O-8vW4rfX98KlMkvRg")
-    expect(info["relatedVideos"][16]["view_count"].as_s).to eq("53510")
-    expect(info["relatedVideos"][16]["short_view_count"].as_s).to eq("53K")
-    expect(info["relatedVideos"][16]["author_verified"].as_s).to eq("true")
-  end
+    expect(info["keywords"].as_a).to contain_exactly(
+      "Patrick Bet-David",
+      "Valeutainment",
+      "The BetDavid Podcast",
+      "The BetDavid Show",
+      "Betdavid",
+      "PBD",
+      "BetDavid show",
+      "Betdavid podcast",
+      "podcast betdavid",
+      "podcast patrick",
+      "patrick bet david podcast",
+      "Valuetainment podcast",
+      "Entrepreneurs",
+      "Entrepreneurship",
+      "Entrepreneur Motivation",
+      "Entrepreneur Advice",
+      "Startup Entrepreneurs",
+      "valuetainment",
+      "patrick bet david",
+      "PBD podcast",
+      "Betdavid show",
+      "Betdavid Podcast",
+      "Podcast Betdavid",
+      "Show Betdavid",
+      "PBDPodcast"
+    ).in_any_order

+    expect(info["allowRatings"].as_bool).to be_true
+    expect(info["isFamilyFriendly"].as_bool).to be_true
+    expect(info["isListed"].as_bool).to be_true
+    expect(info["isUpcoming"].as_bool).to be_true

+    # Related videos

-  it "parses scheduled livestreams data (test 2)" do
-    # Enable mock
-    _player = load_mock("video/scheduled_live_PBD-Podcast.player")
-    _next = load_mock("video/scheduled_live_PBD-Podcast.next")
+    expect(info["relatedVideos"].as_a.size).to eq(20)

-    raw_data = _player.merge!(_next)
-    info = parse_video_info("RG0cjYbXxME", raw_data)
+    expect(info["relatedVideos"][0]["id"]).to eq("j7jPzzjbVuk")
+    expect(info["relatedVideos"][0]["author"]).to eq("Democracy Now!")
+    expect(info["relatedVideos"][0]["ucid"]).to eq("UCzuqE7-t13O4NIDYJfakrhw")
+    expect(info["relatedVideos"][0]["view_count"]).to eq("7576")
+    expect(info["relatedVideos"][0]["short_view_count"]).to eq("7.5K")
+    expect(info["relatedVideos"][0]["author_verified"]).to eq("true")

-    # Some basic verifications
-    expect(typeof(info)).to eq(Hash(String, JSON::Any))
+    # Description

-    expect(info["shortDescription"].as_s).to start_with(
-      <<-TXT
-      PBD Podcast Episode 171. In this episode, Patrick Bet-David is joined by Dr. Patrick Moore and Adam Sosnick.
-
-      Join the channel to get exclusive access to perks: https://bit.ly/3Q9rSQL
-      TXT
-    )
-    expect(info["descriptionHtml"].as_s).to start_with(
-      <<-TXT
-      PBD Podcast Episode 171. In this episode, Patrick Bet-David is joined by Dr. Patrick Moore and Adam Sosnick.
-
-      Join the channel to get exclusive access to perks: <a href="https://bit.ly/3Q9rSQL">bit.ly/3Q9rSQL</a>
-      TXT
-    )
+    description_start_text = "PBD Podcast Episode 241. The home team is ready and at it again with the latest news, interesting topics and trending conversations on topics that matter. Try our sponsor Aura for 14 days free - https://aura.com/pbd"

+    expect(info["description"].as_s).to start_with(description_start_text)
+    expect(info["shortDescription"].as_s).to start_with(description_start_text)

+    # TODO: Update mocks right before the start of PDB podcast, either on friday or saturday (time unknown)
+    # expect(info["descriptionHtml"].as_s).to start_with(
+    #   "PBD Podcast Episode 241. The home team is ready and at it again with the latest news, interesting topics and trending conversations on topics that matter. Try our sponsor Aura for 14 days free - <a href=\"https://aura.com/pbd\">aura.com/pbd</a>"
+    # )

-    expect(info["likes"].as_i).to eq(22)
+    # Video metadata

     expect(info["genre"].as_s).to eq("Entertainment")
-    expect(info["genreUrl"].raw).to be_nil
     expect(info["genreUcid"].as_s).to be_empty
     expect(info["license"].as_s).to be_empty

+    # Author infos

+    expect(info["author"].as_s).to eq("PBD Podcast")
+    expect(info["ucid"].as_s).to eq("UCGX7nGXpz-CmO_Arg-cgJ7A")

     expect(info["authorThumbnail"].as_s).to eq(
       "https://yt3.ggpht.com/61ArDiQshJrvSXcGLhpFfIO3hlMabe2fksitcf6oGob0Mdr5gztdkXxRljICUodL4iuTSrtxW4A=s48-c-k-c0x00ffffff-no-rj"
     )

     expect(info["authorVerified"].as_bool).to be_false
-    expect(info["subCountText"].as_s).to eq("227K")
+    expect(info["subCountText"].as_s).to eq("594K")

-    expect(info["relatedVideos"].as_a.size).to eq(20)
-
-    # related video #1
-    expect(info["relatedVideos"][2]["id"]).to eq("La9oLLoI5Rc")
-    expect(info["relatedVideos"][2]["author"]).to eq("Tom Bilyeu")
-    expect(info["relatedVideos"][2]["ucid"]).to eq("UCnYMOamNKLGVlJgRUbamveA")
-    expect(info["relatedVideos"][2]["view_count"]).to eq("13329149")
-    expect(info["relatedVideos"][2]["short_view_count"]).to eq("13M")
-    expect(info["relatedVideos"][2]["author_verified"]).to eq("true")
-
-    # Related video #2
-    expect(info["relatedVideos"][9]["id"]).to eq("IQ_4fvpzYuA")
-    expect(info["relatedVideos"][9]["author"]).to eq("Business Today")
-    expect(info["relatedVideos"][9]["ucid"]).to eq("UCaPHWiExfUWaKsUtENLCv5w")
-    expect(info["relatedVideos"][9]["view_count"]).to eq("26432")
-    expect(info["relatedVideos"][9]["short_view_count"]).to eq("26K")
-    expect(info["relatedVideos"][9]["author_verified"]).to eq("true")
   end
 end
@@ -1,93 +1,28 @@
 def fetch_channel_playlists(ucid, author, continuation, sort_by)
   if continuation
-    response_json = YoutubeAPI.browse(continuation)
-    continuation_items = response_json["onResponseReceivedActions"]?
-      .try &.[0]["appendContinuationItemsAction"]["continuationItems"]
-
-    return [] of SearchItem, nil if !continuation_items
-
-    items = [] of SearchItem
-    continuation_items.as_a.select(&.as_h.has_key?("gridPlaylistRenderer")).each { |item|
-      extract_item(item, author, ucid).try { |t| items << t }
-    }
-
-    continuation = continuation_items.as_a.last["continuationItemRenderer"]?
-      .try &.["continuationEndpoint"]["continuationCommand"]["token"].as_s
+    initial_data = YoutubeAPI.browse(continuation)
   else
-    url = "/channel/#{ucid}/playlists?flow=list&view=1"
-
-    case sort_by
-    when "last", "last_added"
-      #
-    when "oldest", "oldest_created"
-      url += "&sort=da"
-    when "newest", "newest_created"
-      url += "&sort=dd"
-    else nil # Ignore
-    end
-
-    response = YT_POOL.client &.get(url)
-    initial_data = extract_initial_data(response.body)
-    return [] of SearchItem, nil if !initial_data
-
-    items = extract_items(initial_data, author, ucid)
-    continuation = response.body.match(/"token":"(?<continuation>[^"]+)"/).try &.["continuation"]?
-  end
-
-  return items, continuation
-end
-
-# ## NOTE: DEPRECATED
-# Reason -> Unstable
-# The Protobuf object must be provided with an id of the last playlist from the current "page"
-# in order to fetch the next one accurately
-# (if the id isn't included, entries shift around erratically between pages,
-# leading to repetitions and skip overs)
-#
-# Since it's impossible to produce the appropriate Protobuf without an id being provided by the user,
-# it's better to stick to continuation tokens provided by the first request and onward
-def produce_channel_playlists_url(ucid, cursor, sort = "newest", auto_generated = false)
-  object = {
-    "80226972:embedded" => {
-      "2:string" => ucid,
-      "3:base64" => {
-        "2:string" => "playlists",
-        "6:varint" => 2_i64,
-        "7:varint" => 1_i64,
-        "12:varint" => 1_i64,
-        "13:string" => "",
-        "23:varint" => 0_i64,
-      },
-    },
-  }
-
-  if cursor
-    cursor = Base64.urlsafe_encode(cursor, false) if !auto_generated
-    object["80226972:embedded"]["3:base64"].as(Hash)["15:string"] = cursor
-  end
-
-  if auto_generated
-    object["80226972:embedded"]["3:base64"].as(Hash)["4:varint"] = 0x32_i64
-  else
-    object["80226972:embedded"]["3:base64"].as(Hash)["4:varint"] = 1_i64
-    case sort
-    when "oldest", "oldest_created"
-      object["80226972:embedded"]["3:base64"].as(Hash)["3:varint"] = 2_i64
-    when "newest", "newest_created"
-      object["80226972:embedded"]["3:base64"].as(Hash)["3:varint"] = 3_i64
-    when "last", "last_added"
-      object["80226972:embedded"]["3:base64"].as(Hash)["3:varint"] = 4_i64
-    else nil # Ignore
-    end
-  end
-
-  object["80226972:embedded"]["3:string"] = Base64.urlsafe_encode(Protodec::Any.from_json(Protodec::Any.cast_json(object["80226972:embedded"]["3:base64"])))
-  object["80226972:embedded"].delete("3:base64")
-
-  continuation = object.try { |i| Protodec::Any.cast_json(i) }
-    .try { |i| Protodec::Any.from_json(i) }
-    .try { |i| Base64.urlsafe_encode(i) }
-    .try { |i| URI.encode_www_form(i) }
-
-  return "/browse_ajax?continuation=#{continuation}&gl=US&hl=en"
-end
+    params =
+      case sort_by
+      when "last", "last_added"
+        # Equivalent to "&sort=lad"
+        # {"2:string": "playlists", "3:varint": 4, "4:varint": 1, "6:varint": 1}
+        "EglwbGF5bGlzdHMYBCABMAE%3D"
+      when "oldest", "oldest_created"
+        # formerly "&sort=da"
+        # Not available anymore :c or maybe ??
+        # {"2:string": "playlists", "3:varint": 2, "4:varint": 1, "6:varint": 1}
+        "EglwbGF5bGlzdHMYAiABMAE%3D"
+        # {"2:string": "playlists", "3:varint": 1, "4:varint": 1, "6:varint": 1}
+        # "EglwbGF5bGlzdHMYASABMAE%3D"
+      when "newest", "newest_created"
+        # Formerly "&sort=dd"
+        # {"2:string": "playlists", "3:varint": 3, "4:varint": 1, "6:varint": 1}
+        "EglwbGF5bGlzdHMYAyABMAE%3D"
+      end
+
+    initial_data = YoutubeAPI.browse(ucid, params: params || "")
+  end
+
+  return extract_items(initial_data, author, ucid)
+end
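The rewrite above replaces URL query parameters with pre-encoded protobuf "params" strings; the inline comments give the decoded form of each one. The fragment below is a speculative sketch of how such a string could be regenerated with the same Protodec/Base64/URI pipeline that this file already uses for continuation tokens. The require lines are assumptions about how the shards are loaded elsewhere in the codebase, and the expected output is simply the value quoted in the "last_added" comment.

require "protodec/utils" # assumed: the protodec shard used elsewhere in Invidious
require "base64"
require "uri"

# Decoded protobuf object from the "last", "last_added" branch above.
object = {
  "2:string" => "playlists",
  "3:varint" => 4_i64,
  "4:varint" => 1_i64,
  "6:varint" => 1_i64,
}

# Same encoding pipeline as used for continuation tokens in this file.
params = object
  .try { |i| Protodec::Any.cast_json(i) }
  .try { |i| Protodec::Any.from_json(i) }
  .try { |i| Base64.urlsafe_encode(i) }
  .try { |i| URI.encode_www_form(i) }

# Per the comment above, this should print "EglwbGF5bGlzdHMYBCABMAE%3D".
puts params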
@@ -1,89 +1,176 @@
 def produce_channel_videos_continuation(ucid, page = 1, auto_generated = nil, sort_by = "newest", v2 = false)
-  object = {
-    "80226972:embedded" => {
-      "2:string" => ucid,
-      "3:base64" => {
-        "2:string" => "videos",
-        "6:varint" => 2_i64,
-        "7:varint" => 1_i64,
-        "12:varint" => 1_i64,
-        "13:string" => "",
-        "23:varint" => 0_i64,
-      },
-    },
-  }
-
-  if !v2
-    if auto_generated
-      seed = Time.unix(1525757349)
-      until seed >= Time.utc
-        seed += 1.month
-      end
-      timestamp = seed - (page - 1).months
-
-      object["80226972:embedded"]["3:base64"].as(Hash)["4:varint"] = 0x36_i64
-      object["80226972:embedded"]["3:base64"].as(Hash)["15:string"] = "#{timestamp.to_unix}"
-    else
-      object["80226972:embedded"]["3:base64"].as(Hash)["4:varint"] = 0_i64
-      object["80226972:embedded"]["3:base64"].as(Hash)["15:string"] = "#{page}"
-    end
-  else
-    object["80226972:embedded"]["3:base64"].as(Hash)["4:varint"] = 0_i64
-
-    object["80226972:embedded"]["3:base64"].as(Hash)["61:string"] = Base64.urlsafe_encode(Protodec::Any.from_json(Protodec::Any.cast_json({
-      "1:string" => Base64.urlsafe_encode(Protodec::Any.from_json(Protodec::Any.cast_json({
-        "1:varint" => 30_i64 * (page - 1),
-      }))),
-    })))
-  end
-
-  case sort_by
-  when "newest"
-  when "popular"
-    object["80226972:embedded"]["3:base64"].as(Hash)["3:varint"] = 0x01_i64
-  when "oldest"
-    object["80226972:embedded"]["3:base64"].as(Hash)["3:varint"] = 0x02_i64
-  else nil # Ignore
-  end
-
-  object["80226972:embedded"]["3:string"] = Base64.urlsafe_encode(Protodec::Any.from_json(Protodec::Any.cast_json(object["80226972:embedded"]["3:base64"])))
-  object["80226972:embedded"].delete("3:base64")
-
-  continuation = object.try { |i| Protodec::Any.cast_json(i) }
-    .try { |i| Protodec::Any.from_json(i) }
-    .try { |i| Base64.urlsafe_encode(i) }
-    .try { |i| URI.encode_www_form(i) }
-
-  return continuation
-end
-
-def get_channel_videos_response(ucid, page = 1, auto_generated = nil, sort_by = "newest")
-  continuation = produce_channel_videos_continuation(ucid, page,
-    auto_generated: auto_generated, sort_by: sort_by, v2: true)
-
-  return YoutubeAPI.browse(continuation)
-end
-
-def get_60_videos(ucid, author, page, auto_generated, sort_by = "newest")
-  videos = [] of SearchVideo
-
-  2.times do |i|
-    initial_data = get_channel_videos_response(ucid, page * 2 + (i - 1), auto_generated: auto_generated, sort_by: sort_by)
-    videos.concat extract_videos(initial_data, author, ucid)
-  end
-
-  return videos.size, videos
-end
-
-def get_latest_videos(ucid)
-  initial_data = get_channel_videos_response(ucid)
-  author = initial_data["metadata"]?.try &.["channelMetadataRenderer"]?.try &.["title"]?.try &.as_s
-
-  return extract_videos(initial_data, author, ucid)
-end
-
-# Used in bypass_captcha_job.cr
-def produce_channel_videos_url(ucid, page = 1, auto_generated = nil, sort_by = "newest", v2 = false)
-  continuation = produce_channel_videos_continuation(ucid, page, auto_generated, sort_by, v2)
-  return "/browse_ajax?continuation=#{continuation}&gl=US&hl=en"
-end
+  object_inner_2 = {
+    "2:0:embedded" => {
+      "1:0:varint" => 0_i64,
+    },
+    "5:varint" => 50_i64,
+    "6:varint" => 1_i64,
+    "7:varint" => (page * 30).to_i64,
+    "9:varint" => 1_i64,
+    "10:varint" => 0_i64,
+  }
+
+  object_inner_2_encoded = object_inner_2
+    .try { |i| Protodec::Any.cast_json(i) }
+    .try { |i| Protodec::Any.from_json(i) }
+    .try { |i| Base64.urlsafe_encode(i) }
+    .try { |i| URI.encode_www_form(i) }
+
+  sort_by_numerical =
+    case sort_by
+    when "newest" then 1_i64
+    when "popular" then 2_i64
+    when "oldest" then 3_i64 # Broken as of 10/2022 :c
+    else 1_i64 # Fallback to "newest"
+    end
+
+  object_inner_1 = {
+    "110:embedded" => {
+      "3:embedded" => {
+        "15:embedded" => {
+          "1:embedded" => {
+            "1:string" => object_inner_2_encoded,
+          },
+          "2:embedded" => {
+            "1:string" => "00000000-0000-0000-0000-000000000000",
+          },
+          "3:varint" => sort_by_numerical,
+        },
+      },
+    },
+  }
+
+  object_inner_1_encoded = object_inner_1
+    .try { |i| Protodec::Any.cast_json(i) }
+    .try { |i| Protodec::Any.from_json(i) }
+    .try { |i| Base64.urlsafe_encode(i) }
+    .try { |i| URI.encode_www_form(i) }
+
+  object = {
+    "80226972:embedded" => {
+      "2:string" => ucid,
+      "3:string" => object_inner_1_encoded,
+      "35:string" => "browse-feed#{ucid}videos102",
+    },
+  }
+
+  continuation = object.try { |i| Protodec::Any.cast_json(i) }
+    .try { |i| Protodec::Any.from_json(i) }
+    .try { |i| Base64.urlsafe_encode(i) }
+    .try { |i| URI.encode_www_form(i) }
+
+  return continuation
+end
+
+# Used in bypass_captcha_job.cr
+def produce_channel_videos_url(ucid, page = 1, auto_generated = nil, sort_by = "newest", v2 = false)
+  continuation = produce_channel_videos_continuation(ucid, page, auto_generated, sort_by, v2)
+  return "/browse_ajax?continuation=#{continuation}&gl=US&hl=en"
+end
+
+module Invidious::Channel::Tabs
+  extend self
+
+  # -------------------
+  # Regular videos
+  # -------------------
+
+  def make_initial_video_ctoken(ucid, sort_by) : String
+    return produce_channel_videos_continuation(ucid, sort_by: sort_by)
+  end
+
+  # Wrapper for AboutChannel, as we still need to call get_videos with
+  # an author name and ucid directly (e.g in RSS feeds).
+  # TODO: figure out how to get rid of that
+  def get_videos(channel : AboutChannel, *, continuation : String? = nil, sort_by = "newest")
+    return get_videos(
+      channel.author, channel.ucid,
+      continuation: continuation, sort_by: sort_by
+    )
+  end
+
+  # Wrapper for InvidiousChannel, as we still need to call get_videos with
+  # an author name and ucid directly (e.g in RSS feeds).
+  # TODO: figure out how to get rid of that
+  def get_videos(channel : InvidiousChannel, *, continuation : String? = nil, sort_by = "newest")
+    return get_videos(
+      channel.author, channel.id,
+      continuation: continuation, sort_by: sort_by
+    )
+  end
+
+  def get_videos(author : String, ucid : String, *, continuation : String? = nil, sort_by = "newest")
+    continuation ||= make_initial_video_ctoken(ucid, sort_by)
+    initial_data = YoutubeAPI.browse(continuation: continuation)
+
+    return extract_items(initial_data, author, ucid)
+  end
+
+  def get_60_videos(channel : AboutChannel, *, continuation : String? = nil, sort_by = "newest")
+    if continuation.nil?
+      # Fetch the first "page" of video
+      items, next_continuation = get_videos(channel, sort_by: sort_by)
+    else
+      # Fetch a "page" of videos using the given continuation token
+      items, next_continuation = get_videos(channel, continuation: continuation)
+    end
+
+    # If there is more to load, then load a second "page"
+    # and replace the previous continuation token
+    if !next_continuation.nil?
+      items_2, next_continuation = get_videos(channel, continuation: next_continuation)
+      items.concat items_2
+    end
+
+    return items, next_continuation
+  end
+
+  # -------------------
+  # Shorts
+  # -------------------
+
+  def get_shorts(channel : AboutChannel, continuation : String? = nil)
+    if continuation.nil?
+      # EgZzaG9ydHPyBgUKA5oBAA%3D%3D is the protobuf object to load "shorts"
+      # TODO: try to extract the continuation tokens that allows other sorting options
+      initial_data = YoutubeAPI.browse(channel.ucid, params: "EgZzaG9ydHPyBgUKA5oBAA%3D%3D")
+    else
+      initial_data = YoutubeAPI.browse(continuation: continuation)
+    end
+    return extract_items(initial_data, channel.author, channel.ucid)
+  end
+
+  # -------------------
+  # Livestreams
+  # -------------------
+
+  def get_livestreams(channel : AboutChannel, continuation : String? = nil)
+    if continuation.nil?
+      # EgdzdHJlYW1z8gYECgJ6AA%3D%3D is the protobuf object to load "streams"
+      initial_data = YoutubeAPI.browse(channel.ucid, params: "EgdzdHJlYW1z8gYECgJ6AA%3D%3D")
+    else
+      initial_data = YoutubeAPI.browse(continuation: continuation)
+    end

+    return extract_items(initial_data, channel.author, channel.ucid)
+  end
+
+  def get_60_livestreams(channel : AboutChannel, continuation : String? = nil)
+    if continuation.nil?
+      # Fetch the first "page" of streams
+      items, next_continuation = get_livestreams(channel)
+    else
+      # Fetch a "page" of streams using the given continuation token
+      items, next_continuation = get_livestreams(channel, continuation: continuation)
+    end
+
+    # If there is more to load, then load a second "page"
+    # and replace the previous continuation token
+    if !next_continuation.nil?
+      items_2, next_continuation = get_livestreams(channel, continuation: next_continuation)
+      items.concat items_2
+    end
+
+    return items, next_continuation
+  end
+end
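For context, a hypothetical caller of the new module could page through a channel's uploads by feeding the returned continuation token back in. This is an illustrative sketch, not code from the diff: it assumes, as the pagination code above does, that extract_items returns the items together with an optional continuation token, and that an AboutChannel value is obtained elsewhere.

# Hypothetical helper, written against the signatures introduced above.
def fetch_all_uploads(channel : AboutChannel)
  # First "page" of uploads, newest first
  videos, ctoken = Invidious::Channel::Tabs.get_videos(channel, sort_by: "newest")

  # Keep requesting pages until no continuation token is returned
  while ctoken
    page, ctoken = Invidious::Channel::Tabs.get_videos(channel, continuation: ctoken)
    break if page.empty?
    videos.concat(page)
  end

  return videos
end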
@ -0,0 +1,89 @@

def text_to_parsed_content(text : String) : JSON::Any
  nodes = [] of JSON::Any

  # Convert each line to an array of nodes
  text.split('\n').each do |line|
    # At first, the whole line is a single simple node,
    # before checking for patterns inside the line
    # { 'text': line }
    currentNodes = [] of JSON::Any
    initialNode = {"text" => line}
    currentNodes << (JSON.parse(initialNode.to_json))

    # For each match of the URL pattern, take the last node, keep the text
    # before the match, then create a new node with the URL information
    # { 'text': match, 'navigationEndpoint': { 'urlEndpoint' : 'url': match } }
    line.scan(/https?:\/\/[^ ]*/).each do |urlMatch|
      # Retrieve the last node and strip the match from it
      lastNode = currentNodes[currentNodes.size - 1].as_h
      splittedLastNode = lastNode["text"].as_s.split(urlMatch[0])
      lastNode["text"] = JSON.parse(splittedLastNode[0].to_json)
      currentNodes[currentNodes.size - 1] = JSON.parse(lastNode.to_json)
      # Create a new node with the match and navigation info
      currentNode = {"text" => urlMatch[0], "navigationEndpoint" => {"urlEndpoint" => {"url" => urlMatch[0]}}}
      currentNodes << (JSON.parse(currentNode.to_json))
      # If text remains after the match, create a new simple node with it
      afterNode = {"text" => splittedLastNode.size > 1 ? splittedLastNode[1] : ""}
      currentNodes << (JSON.parse(afterNode.to_json))
    end

    # After processing the matches inside the line,
    # add \n at the end of the last node to preserve the line break
    lastNode = currentNodes[currentNodes.size - 1].as_h
    lastNode["text"] = JSON.parse("#{currentNodes[currentNodes.size - 1]["text"]}\n".to_json)
    currentNodes[currentNodes.size - 1] = JSON.parse(lastNode.to_json)

    # Finally, append this line's nodes to the returned list
    currentNodes.each do |node|
      nodes << (node)
    end
  end

  return JSON.parse({"runs" => nodes}.to_json)
end
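
# Illustrative usage (not part of the diff): a line containing a bare URL is split into
# plain-text runs plus a run carrying a navigationEndpoint that later becomes a link.
parsed = text_to_parsed_content("Watch: https://example.com/v\nThanks!")
parsed["runs"][0] # => {"text" => "Watch: "}
parsed["runs"][1] # => URL run with {"navigationEndpoint" => {"urlEndpoint" => {"url" => "https://example.com/v"}}}
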

def parse_content(content : JSON::Any, video_id : String? = "") : String
  content["simpleText"]?.try &.as_s.rchop('\ufeff').try { |b| HTML.escape(b) }.to_s ||
    content["runs"]?.try &.as_a.try { |r| content_to_comment_html(r, video_id).try &.to_s.gsub("\n", "<br>") } || ""
end

def content_to_comment_html(content, video_id : String? = "")
  html_array = content.map do |run|
    # Sometimes, there is an empty element.
    # See: https://github.com/iv-org/invidious/issues/3096
    next if run.as_h.empty?

    text = HTML.escape(run["text"].as_s)

    if navigationEndpoint = run.dig?("navigationEndpoint")
      text = parse_link_endpoint(navigationEndpoint, text, video_id)
    end

    text = "<b>#{text}</b>" if run["bold"]?
    text = "<s>#{text}</s>" if run["strikethrough"]?
    text = "<i>#{text}</i>" if run["italics"]?

    # check for custom emojis
    if run["emoji"]?
      if run["emoji"]["isCustomEmoji"]?.try &.as_bool
        if emojiImage = run.dig?("emoji", "image")
          emojiAlt = emojiImage.dig?("accessibility", "accessibilityData", "label").try &.as_s || text
          emojiThumb = emojiImage["thumbnails"][0]
          text = String.build do |str|
            str << %(<img alt=") << emojiAlt << "\" "
            str << %(src="/ggpht) << URI.parse(emojiThumb["url"].as_s).request_target << "\" "
            str << %(title=") << emojiAlt << "\" "
            str << %(width=") << emojiThumb["width"] << "\" "
            str << %(height=") << emojiThumb["height"] << "\" "
            str << %(class="channel-emoji" />)
          end
        else
          # Hide deleted channel emoji
          text = ""
        end
      end
    end

    text
  end

  return html_array.join("").delete('\ufeff')
end
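
# Illustrative usage (assumed input shape): rendering a small "runs" array into HTML.
runs = JSON.parse(%([{"text": "Great "}, {"text": "video", "bold": true}])).as_a
content_to_comment_html(runs) # => "Great <b>video</b>"
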
@@ -0,0 +1,76 @@

module Invidious::Comments
  extend self

  def replace_links(html)
    # Check if the document is empty
    # Prevents edge-case bug with Reddit comments, see issue #3115
    if html.nil? || html.empty?
      return html
    end

    html = XML.parse_html(html)

    html.xpath_nodes(%q(//a)).each do |anchor|
      url = URI.parse(anchor["href"])

      if url.host.nil? || url.host.not_nil!.ends_with?("youtube.com") || url.host.not_nil!.ends_with?("youtu.be")
        if url.host.try &.ends_with? "youtu.be"
          url = "/watch?v=#{url.path.lstrip('/')}#{url.query_params}"
        else
          if url.path == "/redirect"
            params = HTTP::Params.parse(url.query.not_nil!)
            anchor["href"] = params["q"]?
          else
            anchor["href"] = url.request_target
          end
        end
      elsif url.to_s == "#"
        begin
          length_seconds = decode_length_seconds(anchor.content)
        rescue ex
          length_seconds = decode_time(anchor.content)
        end

        if length_seconds > 0
          anchor["href"] = "javascript:void(0)"
          anchor["onclick"] = "player.currentTime(#{length_seconds})"
        else
          anchor["href"] = url.request_target
        end
      end
    end

    html = html.xpath_node(%q(//body)).not_nil!
    if node = html.xpath_node(%q(./p))
      html = node
    end

    return html.to_xml(options: XML::SaveOptions::NO_DECL)
  end

  def fill_links(html, scheme, host)
    # Check if the document is empty
    # Prevents edge-case bug with Reddit comments, see issue #3115
    if html.nil? || html.empty?
      return html
    end

    html = XML.parse_html(html)

    html.xpath_nodes("//a").each do |match|
      url = URI.parse(match["href"])
      # Reddit links don't have a host
      if !url.host && !match["href"].starts_with?("javascript") && !url.to_s.ends_with? "#"
        url.scheme = scheme
        url.host = host
        match["href"] = url
      end
    end

    if host == "www.youtube.com"
      html = html.xpath_node(%q(//body/p)).not_nil!
    end

    return html.to_xml(options: XML::SaveOptions::NO_DECL)
  end
end
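
# Usage sketch (illustrative): host-less hrefs, as found in Reddit comment HTML,
# are given an explicit scheme and host so they resolve outside reddit.com.
Invidious::Comments.fill_links(%(<a href="/r/videos/comments/abc/">thread</a>), "https", "www.reddit.com")
# => the anchor's href becomes "https://www.reddit.com/r/videos/comments/abc/"
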
@@ -0,0 +1,41 @@

module Invidious::Comments
  extend self

  def fetch_reddit(id, sort_by = "confidence")
    client = make_client(REDDIT_URL)
    headers = HTTP::Headers{"User-Agent" => "web:invidious:v#{CURRENT_VERSION} (by github.com/iv-org/invidious)"}

    # TODO: Use something like #479 for a static list of instances to use here
    query = URI::Params.encode({q: "(url:3D#{id} OR url:#{id}) AND (site:invidio.us OR site:youtube.com OR site:youtu.be)"})
    search_results = client.get("/search.json?#{query}", headers)

    if search_results.status_code == 200
      search_results = RedditThing.from_json(search_results.body)

      # For videos that have more than one thread, choose the one with the highest score
      threads = search_results.data.as(RedditListing).children
      thread = threads.max_by?(&.data.as(RedditLink).score).try(&.data.as(RedditLink))
      result = thread.try do |t|
        body = client.get("/r/#{t.subreddit}/comments/#{t.id}.json?limit=100&sort=#{sort_by}", headers).body
        Array(RedditThing).from_json(body)
      end
      result ||= [] of RedditThing
    elsif search_results.status_code == 302
      # Previously, if there was only one result, the API would redirect to that result.
      # Now it appears to still return a listing, so this section is likely unnecessary.
      result = client.get(search_results.headers["Location"], headers).body
      result = Array(RedditThing).from_json(result)

      thread = result[0].data.as(RedditListing).children[0].data.as(RedditLink)
    else
      raise NotFoundException.new("Comments not found.")
    end

    client.close

    comments = result[1]?.try(&.data.as(RedditListing).children)
    comments ||= [] of RedditThing
    return comments, thread
  end
end
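
# Usage sketch (illustrative; REDDIT_URL, make_client and the Reddit structs defined in
# the sibling file below are assumed to be loaded, as elsewhere in Invidious):
comments, thread = Invidious::Comments.fetch_reddit("dQw4w9WgXcQ", sort_by: "top")
puts "#{comments.size} top-level comments from r/#{thread.try &.subreddit}"
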
@@ -0,0 +1,57 @@

class RedditThing
  include JSON::Serializable

  property kind : String
  property data : RedditComment | RedditLink | RedditMore | RedditListing
end

class RedditComment
  include JSON::Serializable

  property author : String
  property body_html : String
  property replies : RedditThing | String
  property score : Int32
  property depth : Int32
  property permalink : String

  @[JSON::Field(converter: RedditComment::TimeConverter)]
  property created_utc : Time

  module TimeConverter
    def self.from_json(value : JSON::PullParser) : Time
      Time.unix(value.read_float.to_i)
    end

    def self.to_json(value : Time, json : JSON::Builder)
      json.number(value.to_unix)
    end
  end
end

struct RedditLink
  include JSON::Serializable

  property author : String
  property score : Int32
  property subreddit : String
  property num_comments : Int32
  property id : String
  property permalink : String
  property title : String
end

struct RedditMore
  include JSON::Serializable

  property children : Array(String)
  property count : Int32
  property depth : Int32
end

class RedditListing
  include JSON::Serializable

  property children : Array(RedditThing)
  property modhash : String
end
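
# Illustrative deserialization (minimal payload; the real Reddit API returns many more
# fields, which JSON::Serializable ignores). The union on RedditThing#data is assumed to
# resolve to the first member type whose required fields are all present.
thing = RedditThing.from_json(%({"kind": "t3", "data": {"author": "someone", "score": 42,
  "subreddit": "videos", "num_comments": 7, "id": "abc123", "permalink": "/r/videos/comments/abc123/", "title": "A video"}}))
thing.data.as(RedditLink).title # => "A video"
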
@@ -0,0 +1,250 @@

module Invidious::Comments
  extend self

  def fetch_youtube(id, cursor, format, locale, thin_mode, region, sort_by = "top")
    case cursor
    when nil, ""
      ctoken = Comments.produce_continuation(id, cursor: "", sort_by: sort_by)
    when .starts_with? "ADSJ"
      ctoken = Comments.produce_continuation(id, cursor: cursor, sort_by: sort_by)
    else
      ctoken = cursor
    end

    client_config = YoutubeAPI::ClientConfig.new(region: region)
    response = YoutubeAPI.next(continuation: ctoken, client_config: client_config)
    contents = nil

    if on_response_received_endpoints = response["onResponseReceivedEndpoints"]?
      header = nil
      on_response_received_endpoints.as_a.each do |item|
        if item["reloadContinuationItemsCommand"]?
          case item["reloadContinuationItemsCommand"]["slot"]
          when "RELOAD_CONTINUATION_SLOT_HEADER"
            header = item["reloadContinuationItemsCommand"]["continuationItems"][0]
          when "RELOAD_CONTINUATION_SLOT_BODY"
            # continuationItems is nil when video has no comments
            contents = item["reloadContinuationItemsCommand"]["continuationItems"]?
          end
        elsif item["appendContinuationItemsAction"]?
          contents = item["appendContinuationItemsAction"]["continuationItems"]
        end
      end
    elsif response["continuationContents"]?
      response = response["continuationContents"]
      if response["commentRepliesContinuation"]?
        body = response["commentRepliesContinuation"]
      else
        body = response["itemSectionContinuation"]
      end
      contents = body["contents"]?
      header = body["header"]?
    else
      raise NotFoundException.new("Comments not found.")
    end

    if !contents
      if format == "json"
        return {"comments" => [] of String}.to_json
      else
        return {"contentHtml" => "", "commentCount" => 0}.to_json
      end
    end

    continuation_item_renderer = nil
    contents.as_a.reject! do |item|
      if item["continuationItemRenderer"]?
        continuation_item_renderer = item["continuationItemRenderer"]
        true
      end
    end

    response = JSON.build do |json|
      json.object do
        if header
          count_text = header["commentsHeaderRenderer"]["countText"]
          comment_count = (count_text["simpleText"]? || count_text["runs"]?.try &.[0]?.try &.["text"]?)
            .try &.as_s.gsub(/\D/, "").to_i? || 0
          json.field "commentCount", comment_count
        end

        json.field "videoId", id

        json.field "comments" do
          json.array do
            contents.as_a.each do |node|
              json.object do
                if node["commentThreadRenderer"]?
                  node = node["commentThreadRenderer"]
                end

                if node["replies"]?
                  node_replies = node["replies"]["commentRepliesRenderer"]
                end

                if node["comment"]?
                  node_comment = node["comment"]["commentRenderer"]
                else
                  node_comment = node["commentRenderer"]
                end

                content_html = node_comment["contentText"]?.try { |t| parse_content(t, id) } || ""
                author = node_comment["authorText"]?.try &.["simpleText"]? || ""

                json.field "verified", (node_comment["authorCommentBadge"]? != nil)

                json.field "author", author
                json.field "authorThumbnails" do
                  json.array do
                    node_comment["authorThumbnail"]["thumbnails"].as_a.each do |thumbnail|
                      json.object do
                        json.field "url", thumbnail["url"]
                        json.field "width", thumbnail["width"]
                        json.field "height", thumbnail["height"]
                      end
                    end
                  end
                end

                if node_comment["authorEndpoint"]?
                  json.field "authorId", node_comment["authorEndpoint"]["browseEndpoint"]["browseId"]
                  json.field "authorUrl", node_comment["authorEndpoint"]["browseEndpoint"]["canonicalBaseUrl"]
                else
                  json.field "authorId", ""
                  json.field "authorUrl", ""
                end

                published_text = node_comment["publishedTimeText"]["runs"][0]["text"].as_s
                published = decode_date(published_text.rchop(" (edited)"))

                if published_text.includes?(" (edited)")
                  json.field "isEdited", true
                else
                  json.field "isEdited", false
                end

                json.field "content", html_to_content(content_html)
                json.field "contentHtml", content_html

                json.field "isPinned", (node_comment["pinnedCommentBadge"]? != nil)
                json.field "isSponsor", (node_comment["sponsorCommentBadge"]? != nil)
                if node_comment["sponsorCommentBadge"]?
                  # Sponsor icon thumbnails always have one object and there's only ever the url property in it
                  json.field "sponsorIconUrl", node_comment.dig("sponsorCommentBadge", "sponsorCommentBadgeRenderer", "customBadge", "thumbnails", 0, "url").to_s
                end
                json.field "published", published.to_unix
                json.field "publishedText", translate(locale, "`x` ago", recode_date(published, locale))

                comment_action_buttons_renderer = node_comment["actionButtons"]["commentActionButtonsRenderer"]

                json.field "likeCount", comment_action_buttons_renderer["likeButton"]["toggleButtonRenderer"]["accessibilityData"]["accessibilityData"]["label"].as_s.scan(/\d/).map(&.[0]).join.to_i
                json.field "commentId", node_comment["commentId"]
                json.field "authorIsChannelOwner", node_comment["authorIsChannelOwner"]

                if comment_action_buttons_renderer["creatorHeart"]?
                  hearth_data = comment_action_buttons_renderer["creatorHeart"]["creatorHeartRenderer"]["creatorThumbnail"]
                  json.field "creatorHeart" do
                    json.object do
                      json.field "creatorThumbnail", hearth_data["thumbnails"][-1]["url"]
                      json.field "creatorName", hearth_data["accessibility"]["accessibilityData"]["label"]
                    end
                  end
                end

                if node_replies && !response["commentRepliesContinuation"]?
                  if node_replies["continuations"]?
                    continuation = node_replies["continuations"]?.try &.as_a[0]["nextContinuationData"]["continuation"].as_s
                  elsif node_replies["contents"]?
                    continuation = node_replies["contents"]?.try &.as_a[0]["continuationItemRenderer"]["continuationEndpoint"]["continuationCommand"]["token"].as_s
                  end
                  continuation ||= ""

                  json.field "replies" do
                    json.object do
                      json.field "replyCount", node_comment["replyCount"]? || 1
                      json.field "continuation", continuation
                    end
                  end
                end
              end
            end
          end
        end

        if continuation_item_renderer
          if continuation_item_renderer["continuationEndpoint"]?
            continuation_endpoint = continuation_item_renderer["continuationEndpoint"]
          elsif continuation_item_renderer["button"]?
            continuation_endpoint = continuation_item_renderer["button"]["buttonRenderer"]["command"]
          end
          if continuation_endpoint
            json.field "continuation", continuation_endpoint["continuationCommand"]["token"].as_s
          end
        end
      end
    end

    if format == "html"
      response = JSON.parse(response)
      content_html = Frontend::Comments.template_youtube(response, locale, thin_mode)

      response = JSON.build do |json|
        json.object do
          json.field "contentHtml", content_html

          if response["commentCount"]?
            json.field "commentCount", response["commentCount"]
          else
            json.field "commentCount", 0
          end
        end
      end
    end

    return response
  end
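
  # Usage sketch (illustrative, not part of the diff): callers such as the comments API
  # routes pass back the cursor returned in the previous page's "continuation" field:
  #
  #   first  = fetch_youtube(video_id, nil, "json", locale, thin_mode, region)
  #   cursor = JSON.parse(first)["continuation"]?.try &.as_s
  #   second = fetch_youtube(video_id, cursor, "json", locale, thin_mode, region) if cursor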

  def produce_continuation(video_id, cursor = "", sort_by = "top")
    object = {
      "2:embedded" => {
        "2:string" => video_id,
        "25:varint" => 0_i64,
        "28:varint" => 1_i64,
        "36:embedded" => {
          "5:varint" => -1_i64,
          "8:varint" => 0_i64,
        },
        "40:embedded" => {
          "1:varint" => 4_i64,
          "3:string" => "https://www.youtube.com",
          "4:string" => "",
        },
      },
      "3:varint" => 6_i64,
      "6:embedded" => {
        "1:string" => cursor,
        "4:embedded" => {
          "4:string" => video_id,
          "6:varint" => 0_i64,
        },
        "5:varint" => 20_i64,
      },
    }

    case sort_by
    when "top"
      object["6:embedded"].as(Hash)["4:embedded"].as(Hash)["6:varint"] = 0_i64
    when "new", "newest"
      object["6:embedded"].as(Hash)["4:embedded"].as(Hash)["6:varint"] = 1_i64
    else # top
      object["6:embedded"].as(Hash)["4:embedded"].as(Hash)["6:varint"] = 0_i64
    end

    continuation = object.try { |i| Protodec::Any.cast_json(i) }
      .try { |i| Protodec::Any.from_json(i) }
      .try { |i| Base64.urlsafe_encode(i) }
      .try { |i| URI.encode_www_form(i) }

    return continuation
  end
end
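
# Illustrative (assumed values): building a continuation token for the "newest" sort
# order of a video's comments; fetch_youtube passes tokens like this to YoutubeAPI.next.
ctoken = Invidious::Comments.produce_continuation("dQw4w9WgXcQ", cursor: "", sort_by: "new")
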
Some files were not shown because too many files have changed in this diff.