Add option to view comments with JS disabled
commit 19516eaa25 (parent 294c168193)

src/invidious.cr: 266 changed lines
@@ -229,6 +229,10 @@ get "/watch" do |env|
   end
 
   plid = env.params.query["list"]?
+  nojs = env.params.query["nojs"]?
+
+  nojs ||= "0"
+  nojs = nojs == "1"
 
   user = env.get? "user"
   if user
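For context, the new flag only turns no-JS mode on for the exact value "1"; a missing parameter or any other value leaves it off. A minimal, self-contained Crystal sketch of that logic (the Hash below is a stand-in for env.params.query, and the video id is just an example):

    params = {"v" => "dQw4w9WgXcQ", "nojs" => "1"} # stand-in for env.params.query
    nojs = params["nojs"]?                         # nil when the parameter is absent
    nojs ||= "0"                                   # default to "0"
    nojs = nojs == "1"                             # only "?nojs=1" enables no-JS mode
    puts nojs # => true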
@@ -255,6 +259,51 @@ get "/watch" do |env|
     next templated "error"
   end
 
+  if nojs
+    if preferences
+      source = preferences.comments[0]
+      if source.empty?
+        source = preferences.comments[1]
+      end
+
+      if source == "youtube"
+        begin
+          comments = fetch_youtube_comments(id, "", proxies, "html")
+          comments = JSON.parse(comments)
+          comment_html = template_youtube_comments(comments)
+        rescue ex
+          if preferences.comments[1] == "reddit"
+            comments, reddit_thread = fetch_reddit_comments(id)
+            comment_html = template_reddit_comments(comments)
+
+            comment_html = fill_links(comment_html, "https", "www.reddit.com")
+            comment_html = replace_links(comment_html)
+          end
+        end
+      elsif source == "reddit"
+        begin
+          comments, reddit_thread = fetch_reddit_comments(id)
+          comment_html = template_reddit_comments(comments)
+
+          comment_html = fill_links(comment_html, "https", "www.reddit.com")
+          comment_html = replace_links(comment_html)
+        rescue ex
+          if preferences.comments[1] == "youtube"
+            comments = fetch_youtube_comments(id, "", proxies, "html")
+            comments = JSON.parse(comments)
+            comment_html = template_youtube_comments(comments)
+          end
+        end
+      end
+    else
+      comments = fetch_youtube_comments(id, "", proxies, "html")
+      comments = JSON.parse(comments)
+      comment_html = template_youtube_comments(comments)
+    end
+
+    comment_html ||= ""
+  end
+
   fmt_stream = video.fmt_stream(decrypt_function)
   adaptive_fmts = video.adaptive_fmts(decrypt_function)
   video_streams = video.video_streams(adaptive_fmts)
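The block above implements a fallback chain for the no-JS page: try the user's primary comment source, and if it raises, try the secondary; if everything fails, comment_html ends up as an empty string. A simplified, runnable sketch (the fetch method here is hypothetical and stands in for fetch_youtube_comments/fetch_reddit_comments; the real code only falls back when preferences.comments[1] names the other source):

    def fetch(source : String) : String
      raise "#{source} unavailable" if source == "youtube" # simulate an outage
      "<div>#{source} comments</div>"
    end

    def comments_with_fallback(primary : String, secondary : String) : String
      fetch(primary)
    rescue
      begin
        fetch(secondary)
      rescue
        "" # mirrors the final comment_html ||= ""
      end
    end

    puts comments_with_fallback("youtube", "reddit") # => <div>reddit comments</div>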
@@ -1863,212 +1912,15 @@ get "/api/v1/comments/:id" do |env|
   format = env.params.query["format"]?
   format ||= "json"
 
+  continuation = env.params.query["continuation"]?
+  continuation ||= ""
+
   if source == "youtube"
-    client = make_client(YT_URL)
-    html = client.get("/watch?v=#{id}&bpctr=#{Time.new.epoch + 2000}&gl=US&hl=en&disable_polymer=1")
-    headers = HTTP::Headers.new
-    headers["cookie"] = html.cookies.add_request_headers(headers)["cookie"]
-    body = html.body
-
-    session_token = body.match(/'XSRF_TOKEN': "(?<session_token>[A-Za-z0-9\_\-\=]+)"/).not_nil!["session_token"]
-    itct = body.match(/itct=(?<itct>[^"]+)"/).not_nil!["itct"]
-    ctoken = body.match(/'COMMENTS_TOKEN': "(?<ctoken>[^"]+)"/)
-
-    if body.match(/<meta itemprop="regionsAllowed" content="">/)
-      bypass_channel = Channel({String, HTTPClient, HTTP::Headers} | Nil).new
-
-      proxies.each do |region, list|
-        spawn do
-          proxy_html = %(<meta itemprop="regionsAllowed" content="">)
-
-          list.each do |proxy|
-            begin
-              proxy_client = HTTPClient.new(YT_URL)
-              proxy_client.read_timeout = 10.seconds
-              proxy_client.connect_timeout = 10.seconds
-
-              proxy = HTTPProxy.new(proxy_host: proxy[:ip], proxy_port: proxy[:port])
-              proxy_client.set_proxy(proxy)
-
-              response = proxy_client.get("/watch?v=#{id}&bpctr=#{Time.new.epoch + 2000}&gl=US&hl=en&disable_polymer=1")
-              proxy_headers = HTTP::Headers.new
-              proxy_headers["cookie"] = response.cookies.add_request_headers(headers)["cookie"]
-              proxy_html = response.body
-
-              if !proxy_html.match(/<meta itemprop="regionsAllowed" content="">/)
-                bypass_channel.send({proxy_html, proxy_client, proxy_headers})
-                break
-              end
-            rescue ex
-            end
-          end
-
-          # If none of the proxies we tried returned a valid response
-          if proxy_html.match(/<meta itemprop="regionsAllowed" content="">/)
-            bypass_channel.send(nil)
-          end
-        end
-      end
-
-      proxies.size.times do
-        response = bypass_channel.receive
-        if response
-          session_token = response[0].match(/'XSRF_TOKEN': "(?<session_token>[A-Za-z0-9\_\-\=]+)"/).not_nil!["session_token"]
-          itct = response[0].match(/itct=(?<itct>[^"]+)"/).not_nil!["itct"]
-          ctoken = response[0].match(/'COMMENTS_TOKEN': "(?<ctoken>[^"]+)"/)
-
-          client = response[1]
-          headers = response[2]
-          break
-        end
-      end
-    end
-
-    if !ctoken
-      if format == "json"
-        next {"comments" => [] of String}.to_json
-      else
-        next {"contentHtml" => "", "commentCount" => 0}.to_json
-      end
-    end
-    ctoken = ctoken["ctoken"]
-
-    if env.params.query["continuation"]? && !env.params.query["continuation"].empty?
-      continuation = env.params.query["continuation"]
-      ctoken = continuation
-    else
-      continuation = ctoken
-    end
-
-    post_req = {
-      "session_token" => session_token,
-    }
-    post_req = HTTP::Params.encode(post_req)
-
-    headers["content-type"] = "application/x-www-form-urlencoded"
-
-    headers["x-client-data"] = "CIi2yQEIpbbJAQipncoBCNedygEIqKPKAQ=="
-    headers["x-spf-previous"] = "https://www.youtube.com/watch?v=#{id}&bpctr=#{Time.new.epoch + 2000}&gl=US&hl=en&disable_polymer=1"
-    headers["x-spf-referer"] = "https://www.youtube.com/watch?v=#{id}&bpctr=#{Time.new.epoch + 2000}&gl=US&hl=en&disable_polymer=1"
-
-    headers["x-youtube-client-name"] = "1"
-    headers["x-youtube-client-version"] = "2.20180719"
-    response = client.post("/comment_service_ajax?action_get_comments=1&pbj=1&ctoken=#{ctoken}&continuation=#{continuation}&itct=#{itct}&hl=en&gl=US", headers, post_req)
-    response = JSON.parse(response.body)
-
-    if !response["response"]["continuationContents"]?
-      halt env, status_code: 500
-    end
-
-    response = response["response"]["continuationContents"]
-    if response["commentRepliesContinuation"]?
-      body = response["commentRepliesContinuation"]
-    else
-      body = response["itemSectionContinuation"]
-    end
-    contents = body["contents"]?
-    if !contents
-      if format == "json"
-        next {"comments" => [] of String}.to_json
-      else
-        next {"contentHtml" => "", "commentCount" => 0}.to_json
-      end
-    end
-
-    comments = JSON.build do |json|
-      json.object do
-        if body["header"]?
-          comment_count = body["header"]["commentsHeaderRenderer"]["countText"]["simpleText"].as_s.delete("Comments,").to_i
-          json.field "commentCount", comment_count
-        end
-
-        json.field "comments" do
-          json.array do
-            contents.as_a.each do |node|
-              json.object do
-                if !response["commentRepliesContinuation"]?
-                  node = node["commentThreadRenderer"]
-                end
-
-                if node["replies"]?
-                  node_replies = node["replies"]["commentRepliesRenderer"]
-                end
-
-                if !response["commentRepliesContinuation"]?
-                  node_comment = node["comment"]["commentRenderer"]
-                else
-                  node_comment = node["commentRenderer"]
-                end
-
-                content_html = node_comment["contentText"]["simpleText"]?.try &.as_s.rchop('\ufeff')
-                if content_html
-                  content_html = HTML.escape(content_html)
-                end
-
-                content_html ||= content_to_comment_html(node_comment["contentText"]["runs"].as_a)
-                content_html, content = html_to_content(content_html)
-
-                author = node_comment["authorText"]?.try &.["simpleText"]
-                author ||= ""
-
-                json.field "author", author
-                json.field "authorThumbnails" do
-                  json.array do
-                    node_comment["authorThumbnail"]["thumbnails"].as_a.each do |thumbnail|
-                      json.object do
-                        json.field "url", thumbnail["url"]
-                        json.field "width", thumbnail["width"]
-                        json.field "height", thumbnail["height"]
-                      end
-                    end
-                  end
-                end
-
-                if node_comment["authorEndpoint"]?
-                  json.field "authorId", node_comment["authorEndpoint"]["browseEndpoint"]["browseId"]
-                  json.field "authorUrl", node_comment["authorEndpoint"]["browseEndpoint"]["canonicalBaseUrl"]
-                else
-                  json.field "authorId", ""
-                  json.field "authorUrl", ""
-                end
-
-                published = decode_date(node_comment["publishedTimeText"]["runs"][0]["text"].as_s.rchop(" (edited)"))
-
-                json.field "content", content
-                json.field "contentHtml", content_html
-                json.field "published", published.epoch
-                json.field "publishedText", "#{recode_date(published)} ago"
-                json.field "likeCount", node_comment["likeCount"]
-                json.field "commentId", node_comment["commentId"]
-
-                if node_replies && !response["commentRepliesContinuation"]?
-                  reply_count = node_replies["moreText"]["simpleText"].as_s.delete("View all reply replies,")
-                  if reply_count.empty?
-                    reply_count = 1
-                  else
-                    reply_count = reply_count.try &.to_i?
-                    reply_count ||= 1
-                  end
-
-                  continuation = node_replies["continuations"].as_a[0]["nextContinuationData"]["continuation"].as_s
-
-                  json.field "replies" do
-                    json.object do
-                      json.field "replyCount", reply_count
-                      json.field "continuation", continuation
-                    end
-                  end
-                end
-              end
-            end
-          end
-        end
-
-        if body["continuations"]?
-          continuation = body["continuations"][0]["nextContinuationData"]["continuation"]
-          json.field "continuation", continuation
-        end
-      end
-    end
+    begin
+      comments = fetch_youtube_comments(id, continuation, proxies, format)
+    rescue ex
+      error_message = {"error" => ex.message}.to_json
+      halt env, status_code: 500, response: error_message
+    end
 
   if format == "json"
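With the fetching logic moved into fetch_youtube_comments, the API route shrinks to a begin/rescue that reports failures as JSON instead of a bare 500. A small sketch of the error body the new rescue branch produces:

    require "json"

    ex = Exception.new("Could not fetch comments")  # example failure
    error_message = {"error" => ex.message}.to_json
    puts error_message # => {"error":"Could not fetch comments"}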
@@ -2092,10 +1944,8 @@ get "/api/v1/comments/:id" do |env|
     next response
   end
 elsif source == "reddit"
-  client = make_client(REDDIT_URL)
-  headers = HTTP::Headers{"User-Agent" => "web:invidio.us:v0.6.0 (by /u/omarroth)"}
   begin
-    comments, reddit_thread = get_reddit_comments(id, client, headers)
+    comments, reddit_thread = fetch_reddit_comments(id)
     content_html = template_reddit_comments(comments)
 
     content_html = fill_links(content_html, "https", "www.reddit.com")
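The Reddit branch changes the same way: the HTTP client and User-Agent header move inside the helper (see the next hunk), so the route passes only the video id. A hypothetical stand-in showing the simplified call site:

    # Stand-in with placeholder return values; the real helper returns the
    # parsed comments and the Reddit thread.
    def fetch_reddit_comments(id : String)
      {"(comments placeholder)", "(thread placeholder)"}
    end

    comments, reddit_thread = fetch_reddit_comments("dQw4w9WgXcQ")
    puts comments # the route then runs template_reddit_comments(comments)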
@@ -56,7 +56,221 @@ class RedditListing
   })
 end
 
-def get_reddit_comments(id, client, headers)
+def fetch_youtube_comments(id, continuation, proxies, format)
+  client = make_client(YT_URL)
+  html = client.get("/watch?v=#{id}&bpctr=#{Time.new.epoch + 2000}&gl=US&hl=en&disable_polymer=1")
+  headers = HTTP::Headers.new
+  headers["cookie"] = html.cookies.add_request_headers(headers)["cookie"]
+  body = html.body
+
+  session_token = body.match(/'XSRF_TOKEN': "(?<session_token>[A-Za-z0-9\_\-\=]+)"/).not_nil!["session_token"]
+  itct = body.match(/itct=(?<itct>[^"]+)"/).not_nil!["itct"]
+  ctoken = body.match(/'COMMENTS_TOKEN': "(?<ctoken>[^"]+)"/)
+
+  if body.match(/<meta itemprop="regionsAllowed" content="">/)
+    bypass_channel = Channel({String, HTTPClient, HTTP::Headers} | Nil).new
+
+    proxies.each do |region, list|
+      spawn do
+        proxy_html = %(<meta itemprop="regionsAllowed" content="">)
+
+        list.each do |proxy|
+          begin
+            proxy_client = HTTPClient.new(YT_URL)
+            proxy_client.read_timeout = 10.seconds
+            proxy_client.connect_timeout = 10.seconds
+
+            proxy = HTTPProxy.new(proxy_host: proxy[:ip], proxy_port: proxy[:port])
+            proxy_client.set_proxy(proxy)
+
+            response = proxy_client.get("/watch?v=#{id}&bpctr=#{Time.new.epoch + 2000}&gl=US&hl=en&disable_polymer=1")
+            proxy_headers = HTTP::Headers.new
+            proxy_headers["cookie"] = response.cookies.add_request_headers(headers)["cookie"]
+            proxy_html = response.body
+
+            if !proxy_html.match(/<meta itemprop="regionsAllowed" content="">/)
+              bypass_channel.send({proxy_html, proxy_client, proxy_headers})
+              break
+            end
+          rescue ex
+          end
+        end
+
+        # If none of the proxies we tried returned a valid response
+        if proxy_html.match(/<meta itemprop="regionsAllowed" content="">/)
+          bypass_channel.send(nil)
+        end
+      end
+    end
+
+    proxies.size.times do
+      response = bypass_channel.receive
+      if response
+        session_token = response[0].match(/'XSRF_TOKEN': "(?<session_token>[A-Za-z0-9\_\-\=]+)"/).not_nil!["session_token"]
+        itct = response[0].match(/itct=(?<itct>[^"]+)"/).not_nil!["itct"]
+        ctoken = response[0].match(/'COMMENTS_TOKEN': "(?<ctoken>[^"]+)"/)
+
+        client = response[1]
+        headers = response[2]
+        break
+      end
+    end
+  end
+
+  if !ctoken
+    if format == "json"
+      return {"comments" => [] of String}.to_json
+    else
+      return {"contentHtml" => "", "commentCount" => 0}.to_json
+    end
+  end
+  ctoken = ctoken["ctoken"]
+
+  if !continuation.empty?
+    ctoken = continuation
+  else
+    continuation = ctoken
+  end
+
+  post_req = {
+    "session_token" => session_token,
+  }
+  post_req = HTTP::Params.encode(post_req)
+
+  headers["content-type"] = "application/x-www-form-urlencoded"
+
+  headers["x-client-data"] = "CIi2yQEIpbbJAQipncoBCNedygEIqKPKAQ=="
+  headers["x-spf-previous"] = "https://www.youtube.com/watch?v=#{id}&bpctr=#{Time.new.epoch + 2000}&gl=US&hl=en&disable_polymer=1"
+  headers["x-spf-referer"] = "https://www.youtube.com/watch?v=#{id}&bpctr=#{Time.new.epoch + 2000}&gl=US&hl=en&disable_polymer=1"
+
+  headers["x-youtube-client-name"] = "1"
+  headers["x-youtube-client-version"] = "2.20180719"
+  response = client.post("/comment_service_ajax?action_get_comments=1&pbj=1&ctoken=#{ctoken}&continuation=#{continuation}&itct=#{itct}&hl=en&gl=US", headers, post_req)
+  response = JSON.parse(response.body)
+
+  if !response["response"]["continuationContents"]?
+    raise "Could not fetch comments"
+  end
+
+  response = response["response"]["continuationContents"]
+  if response["commentRepliesContinuation"]?
+    body = response["commentRepliesContinuation"]
+  else
+    body = response["itemSectionContinuation"]
+  end
+
+  contents = body["contents"]?
+  if !contents
+    if format == "json"
+      return {"comments" => [] of String}.to_json
+    else
+      return {"contentHtml" => "", "commentCount" => 0}.to_json
+    end
+  end
+
+  comments = JSON.build do |json|
+    json.object do
+      if body["header"]?
+        comment_count = body["header"]["commentsHeaderRenderer"]["countText"]["simpleText"].as_s.delete("Comments,").to_i
+        json.field "commentCount", comment_count
+      end
+
+      json.field "comments" do
+        json.array do
+          contents.as_a.each do |node|
+            json.object do
+              if !response["commentRepliesContinuation"]?
+                node = node["commentThreadRenderer"]
+              end
+
+              if node["replies"]?
+                node_replies = node["replies"]["commentRepliesRenderer"]
+              end
+
+              if !response["commentRepliesContinuation"]?
+                node_comment = node["comment"]["commentRenderer"]
+              else
+                node_comment = node["commentRenderer"]
+              end
+
+              content_html = node_comment["contentText"]["simpleText"]?.try &.as_s.rchop('\ufeff')
+              if content_html
+                content_html = HTML.escape(content_html)
+              end
+
+              content_html ||= content_to_comment_html(node_comment["contentText"]["runs"].as_a)
+              content_html, content = html_to_content(content_html)
+
+              author = node_comment["authorText"]?.try &.["simpleText"]
+              author ||= ""
+
+              json.field "author", author
+              json.field "authorThumbnails" do
+                json.array do
+                  node_comment["authorThumbnail"]["thumbnails"].as_a.each do |thumbnail|
+                    json.object do
+                      json.field "url", thumbnail["url"]
+                      json.field "width", thumbnail["width"]
+                      json.field "height", thumbnail["height"]
+                    end
+                  end
+                end
+              end
+
+              if node_comment["authorEndpoint"]?
+                json.field "authorId", node_comment["authorEndpoint"]["browseEndpoint"]["browseId"]
+                json.field "authorUrl", node_comment["authorEndpoint"]["browseEndpoint"]["canonicalBaseUrl"]
+              else
+                json.field "authorId", ""
+                json.field "authorUrl", ""
+              end
+
+              published = decode_date(node_comment["publishedTimeText"]["runs"][0]["text"].as_s.rchop(" (edited)"))
+
+              json.field "content", content
+              json.field "contentHtml", content_html
+              json.field "published", published.epoch
+              json.field "publishedText", "#{recode_date(published)} ago"
+              json.field "likeCount", node_comment["likeCount"]
+              json.field "commentId", node_comment["commentId"]
+
+              if node_replies && !response["commentRepliesContinuation"]?
+                reply_count = node_replies["moreText"]["simpleText"].as_s.delete("View all reply replies,")
+                if reply_count.empty?
+                  reply_count = 1
+                else
+                  reply_count = reply_count.try &.to_i?
+                  reply_count ||= 1
+                end
+
+                continuation = node_replies["continuations"].as_a[0]["nextContinuationData"]["continuation"].as_s
+
+                json.field "replies" do
+                  json.object do
+                    json.field "replyCount", reply_count
+                    json.field "continuation", continuation
+                  end
+                end
+              end
+            end
+          end
+        end
+      end
+
+      if body["continuations"]?
+        continuation = body["continuations"][0]["nextContinuationData"]["continuation"]
+        json.field "continuation", continuation
+      end
+    end
+  end
+
+  return comments
+end
+
+def fetch_reddit_comments(id)
+  client = make_client(REDDIT_URL)
+  headers = HTTP::Headers{"User-Agent" => "web:invidio.us:v0.11.0 (by /u/omarroth)"}
+
   query = "(url:3D#{id}%20OR%20url:#{id})%20(site:youtube.com%20OR%20site:youtu.be)"
   search_results = client.get("/search.json?q=#{query}", headers)
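As a usage note, fetch_reddit_comments locates the discussion by searching Reddit for submissions that link the video. A runnable sketch of the search URL it builds, using the query string from the hunk above (example id only):

    id = "dQw4w9WgXcQ" # example video id
    query = "(url:3D#{id}%20OR%20url:#{id})%20(site:youtube.com%20OR%20site:youtu.be)"
    puts "/search.json?q=#{query}" # GET against www.reddit.com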
@@ -121,6 +121,14 @@
         </div>
         <hr>
         <div id="comments">
+            <% if nojs %>
+                <%= comment_html %>
+            <% else %>
+                <noscript>
+                    Hi! Looks like you have JavaScript disabled. Click <a href="/watch?<%= env.params.query %>&nojs=1">here</a> to view
+                    comments, keep in mind it may take a bit longer to load.
+                </noscript>
+            <% end %>
         </div>
     </div>
 </div>
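The <noscript> link simply re-requests the current watch page with nojs=1 appended, which makes the server render comment_html inline instead of loading comments via client-side JavaScript. A sketch of the URL it produces, assuming the page was requested as /watch?v=dQw4w9WgXcQ:

    require "http/params"

    query = HTTP::Params.parse("v=dQw4w9WgXcQ") # stand-in for env.params.query
    puts "/watch?#{query}&nojs=1" # => /watch?v=dQw4w9WgXcQ&nojs=1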