Merge pull request #1849 from saltycrys/channels

WIP channels fix
TheFrenchGhosty 2021-03-06 10:06:20 +01:00 committed by GitHub
commit ec30f7c5d4
2 changed files with 36 additions and 14 deletions

@@ -233,7 +233,7 @@ def fetch_channel(ucid, db, pull_all_videos = true, locale = nil)
   videos = [] of SearchVideo

   begin
-    initial_data = JSON.parse(response.body).as_a.find &.["response"]?
+    initial_data = JSON.parse(response.body)
     raise InfoException.new("Could not extract channel JSON") if !initial_data

     LOGGER.trace("fetch_channel: #{ucid} : Extracting videos from channel videos page initial_data")
@@ -305,7 +305,7 @@ def fetch_channel(ucid, db, pull_all_videos = true, locale = nil)
     loop do
       response = get_channel_videos_response(ucid, page, auto_generated: auto_generated)
-      initial_data = JSON.parse(response.body).as_a.find &.["response"]?
+      initial_data = JSON.parse(response.body)
       raise InfoException.new("Could not extract channel JSON") if !initial_data

       videos = extract_videos(initial_data.as_h, author, ucid)
@@ -388,7 +388,7 @@ def fetch_channel_playlists(ucid, author, auto_generated, continuation, sort_by)
   return items, continuation
 end

-def produce_channel_videos_url(ucid, page = 1, auto_generated = nil, sort_by = "newest", v2 = false)
+def produce_channel_videos_continuation(ucid, page = 1, auto_generated = nil, sort_by = "newest", v2 = false)
   object = {
     "80226972:embedded" => {
       "2:string" => ucid,
@@ -444,6 +444,11 @@ def produce_channel_videos_url(ucid, page = 1, auto_generated = nil, sort_by = "
     .try { |i| Base64.urlsafe_encode(i) }
     .try { |i| URI.encode_www_form(i) }

+  return continuation
+end
+
+def produce_channel_videos_url(ucid, page = 1, auto_generated = nil, sort_by = "newest", v2 = false)
+  continuation = produce_channel_videos_continuation(ucid, page, auto_generated, sort_by, v2)
   return "/browse_ajax?continuation=#{continuation}&gl=US&hl=en"
 end
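
Note: the two hunks above split the old produce_channel_videos_url in two. produce_channel_videos_continuation now returns the bare protobuf continuation token, and produce_channel_videos_url becomes a thin wrapper that embeds that token in the legacy /browse_ajax URL. A minimal usage sketch of the split (the channel ID below is a placeholder, not taken from this commit):

    # Sketch only; "UCxxxxxxxxxxxxxxxxxxxxxx" stands in for a real channel ID.
    ucid = "UCxxxxxxxxxxxxxxxxxxxxxx"

    # New helper: bare token, ready to be POSTed to /youtubei/v1/browse.
    token = produce_channel_videos_continuation(ucid, 1, sort_by: "newest", v2: true)

    # Old entry point, now a wrapper: the same token embedded in a /browse_ajax URL.
    url = produce_channel_videos_url(ucid, 1, sort_by: "newest", v2: true)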
@@ -932,17 +937,35 @@ def get_about_info(ucid, locale)
   })
 end

-def get_channel_videos_response(ucid, page = 1, auto_generated = nil, sort_by = "newest")
-  url = produce_channel_videos_url(ucid, page, auto_generated: auto_generated, sort_by: sort_by, v2: true)
-  return YT_POOL.client &.get(url)
+def get_channel_videos_response(ucid, page = 1, auto_generated = nil, sort_by = "newest", youtubei_browse = true)
+  if youtubei_browse
+    continuation = produce_channel_videos_continuation(ucid, page, auto_generated: auto_generated, sort_by: sort_by, v2: true)
+    data = {
+      "context": {
+        "client": {
+          "clientName": "WEB",
+          "clientVersion": "2.20201021.03.00",
+        },
+      },
+      "continuation": continuation,
+    }.to_json
+    return YT_POOL.client &.post(
+      "/youtubei/v1/browse?key=AIzaSyAO_FJ2SlqU8Q4STEHLGCilw_Y9_11qcW8",
+      headers: HTTP::Headers{"content-type" => "application/json"},
+      body: data
+    )
+  else
+    url = produce_channel_videos_url(ucid, page, auto_generated: auto_generated, sort_by: sort_by, v2: true)
+    return YT_POOL.client &.get(url)
+  end
 end

 def get_60_videos(ucid, author, page, auto_generated, sort_by = "newest")
   videos = [] of SearchVideo

   2.times do |i|
     response = get_channel_videos_response(ucid, page * 2 + (i - 1), auto_generated: auto_generated, sort_by: sort_by)
-    initial_data = JSON.parse(response.body).as_a.find &.["response"]?
+    initial_data = JSON.parse(response.body)
     break if !initial_data
     videos.concat extract_videos(initial_data.as_h, author, ucid)
   end
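
Note: the rewritten get_channel_videos_response above defaults to POSTing JSON to the innertube /youtubei/v1/browse endpoint instead of GETting /browse_ajax; the old behaviour stays reachable via youtubei_browse: false. A rough standalone equivalent of the new request, as a sketch that bypasses the YT_POOL connection pool and uses a placeholder token:

    require "http/client"
    require "json"

    token = "..." # placeholder: output of produce_channel_videos_continuation

    # Same payload shape as in the diff above: a WEB client context plus the token.
    body = {
      "context"      => {"client" => {"clientName" => "WEB", "clientVersion" => "2.20201021.03.00"}},
      "continuation" => token,
    }.to_json

    client = HTTP::Client.new("www.youtube.com", tls: true)
    response = client.post(
      "/youtubei/v1/browse?key=AIzaSyAO_FJ2SlqU8Q4STEHLGCilw_Y9_11qcW8",
      headers: HTTP::Headers{"content-type" => "application/json"},
      body: body
    )
    initial_data = JSON.parse(response.body) # a single JSON object, unlike /browse_ajax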
@@ -951,10 +974,10 @@ def get_60_videos(ucid, author, page, auto_generated, sort_by = "newest")
 end

 def get_latest_videos(ucid)
-  response = get_channel_videos_response(ucid, 1)
-  initial_data = JSON.parse(response.body).as_a.find &.["response"]?
+  response = get_channel_videos_response(ucid)
+  initial_data = JSON.parse(response.body)
   return [] of SearchVideo if !initial_data
-  author = initial_data["response"]?.try &.["metadata"]?.try &.["channelMetadataRenderer"]?.try &.["title"]?.try &.as_s
+  author = initial_data["metadata"]?.try &.["channelMetadataRenderer"]?.try &.["title"]?.try &.as_s

   items = extract_videos(initial_data.as_h, author, ucid)

   return items

@@ -383,7 +383,6 @@ def extract_items(initial_data : Hash(String, JSON::Any), author_fallback : Stri
   items = [] of SearchItem

   channel_v2_response = initial_data
-    .try &.["response"]?
     .try &.["continuationContents"]?
     .try &.["gridContinuation"]?
     .try &.["items"]?