From 4f856dd898b20ddc905e7592701eb685813fa262 Mon Sep 17 00:00:00 2001
From: Omar Roth

@@ -2370,7 +2370,7 @@ get "/api/v1/channels/:ucid" do |env|
total_views = 0_i64
sub_count = 0_i64
- joined = Time.epoch(0)
+ joined = Time.unix(0)
metadata = channel_html.xpath_nodes(%q(//span[@class="about-stat"]))
metadata.each do |item|
case item.content
@@ -2426,7 +2426,7 @@ get "/api/v1/channels/:ucid" do |env|
json.field "subCount", sub_count
json.field "totalViews", total_views
- json.field "joined", joined.epoch
+ json.field "joined", joined.to_unix
json.field "paid", paid
json.field "isFamilyFriendly", is_family_friendly
@@ -2460,7 +2460,7 @@ get "/api/v1/channels/:ucid" do |env|
json.field "descriptionHtml", video.description_html
json.field "viewCount", video.views
- json.field "published", video.published.epoch
+ json.field "published", video.published.to_unix
json.field "publishedText", "#{recode_date(video.published)} ago"
json.field "lengthSeconds", video.length_seconds
json.field "paid", video.paid
@@ -2517,7 +2517,7 @@ end
json.field "descriptionHtml", video.description_html
json.field "viewCount", video.views
- json.field "published", video.published.epoch
+ json.field "published", video.published.to_unix
json.field "publishedText", "#{recode_date(video.published)} ago"
json.field "lengthSeconds", video.length_seconds
json.field "paid", video.paid
@@ -2565,7 +2565,7 @@ get "/api/v1/channels/search/:ucid" do |env|
json.field "descriptionHtml", item.description_html
json.field "viewCount", item.views
- json.field "published", item.published.epoch
+ json.field "published", item.published.to_unix
json.field "publishedText", "#{recode_date(item.published)} ago"
json.field "lengthSeconds", item.length_seconds
json.field "liveNow", item.live_now
@@ -2688,7 +2688,7 @@ get "/api/v1/search" do |env|
json.field "descriptionHtml", item.description_html
json.field "viewCount", item.views
- json.field "published", item.published.epoch
+ json.field "published", item.published.to_unix
json.field "publishedText", "#{recode_date(item.published)} ago"
json.field "lengthSeconds", item.length_seconds
json.field "liveNow", item.live_now
@@ -2809,7 +2809,7 @@ get "/api/v1/playlists/:plid" do |env|
json.field "videoCount", playlist.video_count
json.field "viewCount", playlist.views
- json.field "updated", playlist.updated.epoch
+ json.field "updated", playlist.updated.to_unix
json.field "videos" do
json.array do
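
The hunks above are one mechanical substitution: Crystal 0.27 renamed Time.epoch to Time.unix and Time#epoch to Time#to_unix, so every handler that serializes a timestamp into a JSON field switches accordingly. A minimal sketch of the pattern under the renamed API, assuming Crystal 0.27+; the field names are illustrative rather than copied from the patch:

    require "json"

    # Time.unix replaces Time.epoch: build a Time from Unix seconds.
    joined = Time.unix(1525757349)

    # Time#to_unix replaces Time#epoch: emit the timestamp back out as
    # Unix seconds, the way the handlers above emit "joined"/"published".
    output = JSON.build do |json|
      json.object do
        json.field "joined", joined.to_unix
        json.field "joinedText", joined.to_s("%Y-%m-%d")
      end
    end

    puts output # => {"joined":1525757349,"joinedText":"2018-05-08"}
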
diff --git a/src/invidious/channels.cr b/src/invidious/channels.cr
index d3f4588d..dcab5e29 100644
--- a/src/invidious/channels.cr
+++ b/src/invidious/channels.cr
@@ -165,14 +165,14 @@ end
def produce_channel_videos_url(ucid, page = 1, auto_generated = nil)
if auto_generated
- seed = Time.epoch(1525757349)
+ seed = Time.unix(1525757349)
until seed >= Time.now
seed += 1.month
end
timestamp = seed - (page - 1).months
- page = "#{timestamp.epoch}"
+ page = "#{timestamp.to_unix}"
switch = "\x36"
else
page = "#{page}"
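
produce_channel_videos_url steps a fixed seed date forward a month at a time until it passes the current time, then walks back (page - 1) months and embeds the resulting Unix timestamp in the page token; only the constructor and accessor names change here. A small sketch of that stepping logic with the renamed API, assuming Crystal 0.27+; the seed is the constant from the hunk above and the page number is arbitrary:

    # Fixed seed used for auto-generated channel pagination.
    seed = Time.unix(1525757349)
    page = 3

    # Advance the seed past "now" in whole months, then step back
    # (page - 1) months, mirroring the logic in the hunk above.
    until seed >= Time.now
      seed += 1.month
    end
    timestamp = seed - (page - 1).months

    puts timestamp.to_unix # Unix seconds embedded in the continuation token
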
diff --git a/src/invidious/comments.cr b/src/invidious/comments.cr
index 94c4698e..a699aaac 100644
--- a/src/invidious/comments.cr
+++ b/src/invidious/comments.cr
@@ -8,11 +8,11 @@ end
class RedditComment
module TimeConverter
def self.from_json(value : JSON::PullParser) : Time
- Time.epoch(value.read_float.to_i)
+ Time.unix(value.read_float.to_i)
end
def self.to_json(value : Time, json : JSON::Builder)
- json.number(value.epoch)
+ json.number(value.to_unix)
end
end
@@ -58,7 +58,7 @@ end
def fetch_youtube_comments(id, continuation, proxies, format)
client = make_client(YT_URL)
- html = client.get("/watch?v=#{id}&bpctr=#{Time.new.epoch + 2000}&gl=US&hl=en&disable_polymer=1")
+ html = client.get("/watch?v=#{id}&bpctr=#{Time.new.to_unix + 2000}&gl=US&hl=en&disable_polymer=1")
headers = HTTP::Headers.new
headers["cookie"] = html.cookies.add_request_headers(headers)["cookie"]
body = html.body
@@ -83,7 +83,7 @@ def fetch_youtube_comments(id, continuation, proxies, format)
proxy = HTTPProxy.new(proxy_host: proxy[:ip], proxy_port: proxy[:port])
proxy_client.set_proxy(proxy)
- response = proxy_client.get("/watch?v=#{id}&bpctr=#{Time.new.epoch + 2000}&gl=US&hl=en&disable_polymer=1")
+ response = proxy_client.get("/watch?v=#{id}&bpctr=#{Time.new.to_unix + 2000}&gl=US&hl=en&disable_polymer=1")
proxy_headers = HTTP::Headers.new
proxy_headers["cookie"] = response.cookies.add_request_headers(headers)["cookie"]
proxy_html = response.body
@@ -140,8 +140,8 @@ def fetch_youtube_comments(id, continuation, proxies, format)
headers["content-type"] = "application/x-www-form-urlencoded"
headers["x-client-data"] = "CIi2yQEIpbbJAQipncoBCNedygEIqKPKAQ=="
- headers["x-spf-previous"] = "https://www.youtube.com/watch?v=#{id}&bpctr=#{Time.new.epoch + 2000}&gl=US&hl=en&disable_polymer=1"
- headers["x-spf-referer"] = "https://www.youtube.com/watch?v=#{id}&bpctr=#{Time.new.epoch + 2000}&gl=US&hl=en&disable_polymer=1"
+ headers["x-spf-previous"] = "https://www.youtube.com/watch?v=#{id}&bpctr=#{Time.new.to_unix + 2000}&gl=US&hl=en&disable_polymer=1"
+ headers["x-spf-referer"] = "https://www.youtube.com/watch?v=#{id}&bpctr=#{Time.new.to_unix + 2000}&gl=US&hl=en&disable_polymer=1"
headers["x-youtube-client-name"] = "1"
headers["x-youtube-client-version"] = "2.20180719"
@@ -229,7 +229,7 @@ def fetch_youtube_comments(id, continuation, proxies, format)
json.field "content", content
json.field "contentHtml", content_html
- json.field "published", published.epoch
+ json.field "published", published.to_unix
json.field "publishedText", "#{recode_date(published)} ago"
json.field "likeCount", node_comment["likeCount"]
json.field "commentId", node_comment["commentId"]
@@ -327,7 +327,7 @@ def template_youtube_comments(comments)
#{child["author"]}