1
0
mirror of https://github.com/mxpv/podsync.git synced 2024-05-11 05:55:04 +00:00

Fix page_size being limited to 50 on YouTube

This commit is contained in:
Th0masL
2022-06-13 01:43:56 +03:00
parent 7fc950fba5
commit 5f41d87a8b
2 changed files with 99 additions and 46 deletions

View File

@@ -9,6 +9,7 @@ import (
"strings"
"time"
log "github.com/sirupsen/logrus"
"github.com/BrianHicks/finch/duration"
"github.com/mxpv/podsync/pkg/feed"
"github.com/pkg/errors"
@@ -264,12 +265,59 @@ func (yt *YouTubeBuilder) getSize(duration int64, feed *model.Feed) int64 {
// See https://developers.google.com/youtube/v3/docs/videos/list#part
func (yt *YouTubeBuilder) queryVideoDescriptions(ctx context.Context, playlist map[string]*youtube.PlaylistItemSnippet, feed *model.Feed) error {
// Make the list of video ids
// and count how many API calls will be required
ids := make([]string, 0, len(playlist))
count := 0
count_expected_api_calls := 1
for _, s := range playlist {
count++
ids = append(ids, s.ResourceId.VideoId)
// Increment the counter of expected API calls
if count == maxYoutubeResults {
count_expected_api_calls++
count = 0
}
}
req, err := yt.client.Videos.List("id,snippet,contentDetails").Id(strings.Join(ids, ",")).Context(ctx).Do(yt.key)
log.Debugf("Expected to make %d API calls to get the descriptions for %d episode(s).", count_expected_api_calls, len(ids))
// Init a list that will contains the aggregated strings of videos IDs (capped at 50 IDs per API Calls)
ids_list := make([]string, 0, count_expected_api_calls )
// Init some vars for the logic of breaking the IDs down into groups of 50
total_count_id := 0
count_id := 0
temp_ids_list := make([]string, 0)
for _, id := range ids {
total_count_id++
count_id++
// If we have not yet reached the limit of the YouTube API,
// append this video ID to the temporary list
if count_id <= maxYoutubeResults {
temp_ids_list = append(temp_ids_list, id)
}
// If we have reached the limit of YouTube API,
// convert the temporary ID list into a string and
// save it into the final ID list
if count_id == maxYoutubeResults {
count_id = 0
ids_list = append(ids_list, strings.Join(temp_ids_list, ","))
// Reset the value of the temporary ID list
temp_ids_list = nil
} else if total_count_id == len(playlist) {
// Convert the temporary ID list into a string and append it to the final ID list
ids_list = append(ids_list, strings.Join(temp_ids_list, ","))
// Reset the value of the temporary ID list
temp_ids_list = nil
}
}
// Loop in each list of 50 (or less) IDs and query the description
for list_number := 0; list_number < len(ids_list); list_number++ {
req, err := yt.client.Videos.List("id,snippet,contentDetails").Id(ids_list[list_number]).Context(ctx).Do(yt.key)
if err != nil {
return errors.Wrap(err, "failed to query video descriptions")
}
@@ -326,6 +374,7 @@ func (yt *YouTubeBuilder) queryVideoDescriptions(ctx context.Context, playlist m
Status: model.EpisodeNew,
})
}
}
return nil
}

View File

@@ -157,8 +157,12 @@ func (u *Manager) downloadEpisodes(ctx context.Context, feedConfig *feed.Config)
// Build the list of files to download
if err := u.db.WalkEpisodes(ctx, feedID, func(episode *model.Episode) error {
var (
logger = log.WithFields(log.Fields{"episode_id": episode.ID})
)
if episode.Status != model.EpisodeNew && episode.Status != model.EpisodeError {
// File already downloaded
logger.Infof("skipping due to file already on disk")
return nil
}