@@ -1486,29 +1486,8 @@ get "/feed/channel/:ucid" do |env|
     halt env, status_code: 404, response: error_message
   end
 
-  client = make_client(YT_URL)
-
   page = 1
-
-  videos = [] of SearchVideo
-  2.times do |i|
-    url = produce_channel_videos_url(ucid, page * 2 + (i - 1), auto_generated: auto_generated)
-    response = client.get(url)
-    json = JSON.parse(response.body)
-
-    if json["content_html"]? && !json["content_html"].as_s.empty?
-      document = XML.parse_html(json["content_html"].as_s)
-      nodeset = document.xpath_nodes(%q(//li[contains(@class, "feed-item-container")]))
-
-      if auto_generated
-        videos += extract_videos(nodeset)
-      else
-        videos += extract_videos(nodeset, ucid)
-      end
-    else
-      break
-    end
-  end
+  videos, count = get_60_videos(ucid, page, auto_generated)
 
   host_url = make_host_url(Kemal.config.ssl || CONFIG.https_only, env.request.headers["Host"]?)
   path = env.request.path
@@ -1753,27 +1732,7 @@ get "/channel/:ucid" do |env|
     end
   end
 
-  client = make_client(YT_URL)
-
-  videos = [] of SearchVideo
-  2.times do |i|
-    url = produce_channel_videos_url(ucid, page * 2 + (i - 1), auto_generated: auto_generated)
-    response = client.get(url)
-    json = JSON.parse(response.body)
-
-    if json["content_html"]? && !json["content_html"].as_s.empty?
-      document = XML.parse_html(json["content_html"].as_s)
-      nodeset = document.xpath_nodes(%q(//li[contains(@class, "feed-item-container")]))
-
-      if auto_generated
-        videos += extract_videos(nodeset)
-      else
-        videos += extract_videos(nodeset, ucid)
-      end
-    else
-      break
-    end
-  end
+  videos, count = get_60_videos(ucid, page, auto_generated)
 
   templated "channel"
 end
@@ -2533,30 +2492,10 @@ get "/api/v1/channels/:ucid" do |env|
     halt env, status_code: 404, response: error_message
   end
 
-  client = make_client(YT_URL)
-
   page = 1
-
-  videos = [] of SearchVideo
-  2.times do |i|
-    url = produce_channel_videos_url(ucid, page * 2 + (i - 1), auto_generated: auto_generated)
-    response = client.get(url)
-    json = JSON.parse(response.body)
-
-    if json["content_html"]? && !json["content_html"].as_s.empty?
-      document = XML.parse_html(json["content_html"].as_s)
-      nodeset = document.xpath_nodes(%q(//li[contains(@class, "feed-item-container")]))
-
-      if auto_generated
-        videos += extract_videos(nodeset)
-      else
-        videos += extract_videos(nodeset, ucid)
-      end
-    else
-      break
-    end
-  end
+  videos, count = get_60_videos(ucid, page, auto_generated)
+  client = make_client(YT_URL)
 
   channel_html = client.get("/channel/#{ucid}/about?disable_polymer=1").body
   channel_html = XML.parse_html(channel_html)
   banner = channel_html.xpath_node(%q(//div[@id="gh-banner"]/style)).not_nil!.content
@@ -2692,27 +2631,7 @@ end
     halt env, status_code: 404, response: error_message
   end
 
-  client = make_client(YT_URL)
-
-  videos = [] of SearchVideo
-  2.times do |i|
-    url = produce_channel_videos_url(ucid, page * 2 + (i - 1), auto_generated: auto_generated)
-    response = client.get(url)
-    json = JSON.parse(response.body)
-
-    if json["content_html"]? && !json["content_html"].as_s.empty?
-      document = XML.parse_html(json["content_html"].as_s)
-      nodeset = document.xpath_nodes(%q(//li[contains(@class, "feed-item-container")]))
-
-      if auto_generated
-        videos += extract_videos(nodeset)
-      else
-        videos += extract_videos(nodeset, ucid)
-      end
-    else
-      break
-    end
-  end
+  videos, count = get_60_videos(ucid, page, auto_generated)
 
   result = JSON.build do |json|
     json.array do