author    | Benedikt Peetz <benedikt.peetz@b-peetz.de> | 2024-10-14 14:56:29 +0200
committer | Benedikt Peetz <benedikt.peetz@b-peetz.de> | 2024-10-14 14:56:29 +0200
commit    | 6c9286857ef8b314962b67f4a16a66e8c35531bc (patch)
tree      | 9ced4485ec38b39f82cba258c06321a21c40000a /yt/src
parent    | build(Cargo.toml): Add further lints (diff)
refactor(treewide): Combine the separate crates into one workspace
Diffstat (limited to 'yt/src')
37 files changed, 5302 insertions, 0 deletions
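
Note: because the diffstat is limited to 'yt/src', the new workspace-level Cargo.toml itself does not appear below. As a rough illustration only, a manifest for this kind of crate consolidation might look like the following sketch; the member names (yt, yt_dlp, libmpv2) are assumptions inferred from crates referenced in the code, not taken from the actual commit.

```toml
# Hypothetical sketch only -- this file is not part of the diff below.
# Member names are assumptions based on crates referenced in yt/src
# (e.g. `yt_dlp::...` and the `libmpv2` log module); the real member
# list is not visible in this commit.
[workspace]
resolver = "2"
members = [
    "yt",      # the CLI whose sources appear under yt/src below
    "yt_dlp",  # assumed: the yt-dlp wrapper crate
    "libmpv2", # assumed: the mpv bindings crate
]
```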
diff --git a/yt/src/app.rs b/yt/src/app.rs new file mode 100644 index 0000000..b7d136e --- /dev/null +++ b/yt/src/app.rs @@ -0,0 +1,41 @@ +// yt - A fully featured command line YouTube client +// +// Copyright (C) 2024 Benedikt Peetz <benedikt.peetz@b-peetz.de> +// SPDX-License-Identifier: GPL-3.0-or-later +// +// This file is part of Yt. +// +// You should have received a copy of the License along with this program. +// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>. + +use anyhow::{Context, Result}; +use sqlx::{query, sqlite::SqliteConnectOptions, SqlitePool}; + +use crate::config::Config; + +pub struct App { + pub database: SqlitePool, + pub config: Config, +} + +impl App { + pub async fn new(config: Config) -> Result<Self> { + let options = SqliteConnectOptions::new() + .filename(&config.paths.database_path) + .optimize_on_close(true, None) + .create_if_missing(true); + + let pool = SqlitePool::connect_with(options) + .await + .context("Failed to connect to database!")?; + + query(include_str!("storage/video_database/schema.sql")) + .execute(&pool) + .await?; + + Ok(App { + database: pool, + config, + }) + } +} diff --git a/yt/src/cache/mod.rs b/yt/src/cache/mod.rs new file mode 100644 index 0000000..a3e08c9 --- /dev/null +++ b/yt/src/cache/mod.rs @@ -0,0 +1,88 @@ +// yt - A fully featured command line YouTube client +// +// Copyright (C) 2024 Benedikt Peetz <benedikt.peetz@b-peetz.de> +// SPDX-License-Identifier: GPL-3.0-or-later +// +// This file is part of Yt. +// +// You should have received a copy of the License along with this program. +// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>. + +use anyhow::{Context, Result}; +use log::info; +use tokio::fs; + +use crate::{ + app::App, + storage::video_database::{ + downloader::set_video_cache_path, getters::get_videos, setters::set_state_change, Video, + VideoStatus, + }, +}; + +async fn invalidate_video(app: &App, video: &Video, hard: bool) -> Result<()> { + info!("Invalidating cache of video: '{}'", video.title); + + if hard { + if let Some(path) = &video.cache_path { + info!("Removing cached video at: '{}'", path.display()); + fs::remove_file(path).await.with_context(|| { + format!( + "Failed to delete video ('{}') cache path: '{}'.", + video.title, + path.display() + ) + })?; + } + } + + set_video_cache_path(app, &video.extractor_hash, None).await?; + + Ok(()) +} + +pub async fn invalidate(app: &App, hard: bool) -> Result<()> { + let all_cached_things = get_videos(app, &[VideoStatus::Cached], None).await?; + + info!("Got videos to invalidate: '{}'", all_cached_things.len()); + + for video in all_cached_things { + invalidate_video(app, &video, hard).await? + } + + Ok(()) +} + +pub async fn maintain(app: &App, all: bool) -> Result<()> { + let domain = if all { + vec![ + VideoStatus::Pick, + // + VideoStatus::Watch, + VideoStatus::Cached, + VideoStatus::Watched, + // + VideoStatus::Drop, + VideoStatus::Dropped, + ] + } else { + vec![VideoStatus::Watch, VideoStatus::Cached] + }; + + let cached_videos = get_videos(app, domain.as_slice(), None).await?; + + for vid in cached_videos { + if let Some(path) = vid.cache_path.as_ref() { + info!("Checking if path ('{}') exists", path.display()); + if !path.exists() { + invalidate_video(app, &vid, false).await?; + } + } + if vid.status_change { + info!("Video '{}' has it's changing bit set. This is probably the result of an unexpectet exit. 
Clearing it", vid.title); + set_state_change(app, &vid.extractor_hash, false).await?; + } + } + + Ok(()) +} diff --git a/yt/src/cli.rs b/yt/src/cli.rs new file mode 100644 index 0000000..d19586e --- /dev/null +++ b/yt/src/cli.rs @@ -0,0 +1,318 @@ +// yt - A fully featured command line YouTube client +// +// Copyright (C) 2024 Benedikt Peetz <benedikt.peetz@b-peetz.de> +// SPDX-License-Identifier: GPL-3.0-or-later +// +// This file is part of Yt. +// +// You should have received a copy of the License along with this program. +// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>. + +use std::path::PathBuf; + +use anyhow::Context; +use bytes::Bytes; +use chrono::NaiveDate; +use clap::{ArgAction, Args, Parser, Subcommand}; +use url::Url; + +use crate::{ + select::selection_file::duration::Duration, + storage::video_database::extractor_hash::LazyExtractorHash, +}; + +#[derive(Parser, Debug)] +#[clap(author, version, about, long_about = None)] +/// An command line interface to select, download and watch videos +pub struct CliArgs { + #[command(subcommand)] + /// The subcommand to execute [default: select] + pub command: Option<Command>, + + /// Increase message verbosity + #[arg(long="verbose", short = 'v', action = ArgAction::Count)] + pub verbosity: u8, + + /// Set the path to the videos.db. This overrides the default and the config file. + #[arg(long, short)] + pub db_path: Option<PathBuf>, + + /// Set the path to the config.toml. + /// This overrides the default. + #[arg(long, short)] + pub config_path: Option<PathBuf>, + + /// Silence all output + #[arg(long, short = 'q')] + pub quiet: bool, +} + +#[derive(Subcommand, Debug)] +pub enum Command { + /// Download and cache URLs + Download { + /// Forcefully re-download all cached videos (i.e. delete the cache path, then download). + #[arg(short, long)] + force: bool, + + /// The maximum size the download dir should have. Beware that the value must be given in + /// bytes. + #[arg(short, long, value_parser = byte_parser)] + max_cache_size: Option<u64>, + }, + + /// Select, download and watch in one command. + Sedowa {}, + /// Download and watch in one command. 
+ Dowa {}, + + /// Work with single videos + Videos { + #[command(subcommand)] + cmd: VideosCommand, + }, + + /// Watch the already cached (and selected) videos + Watch {}, + + /// Show, which videos have been selected to be watched (and their cache status) + Status {}, + + /// Show, the configuration options in effect + Config {}, + + /// Perform various tests + Check { + #[command(subcommand)] + command: CheckCommand, + }, + + /// Display the comments of the currently playing video + Comments {}, + /// Display the description of the currently playing video + Description {}, + + /// Manipulate the video cache in the database + #[command(visible_alias = "db")] + Database { + #[command(subcommand)] + command: CacheCommand, + }, + + /// Change the state of videos in the database (the default) + Select { + #[command(subcommand)] + cmd: Option<SelectCommand>, + }, + + /// Update the video database + Update { + #[arg(short, long)] + /// The number of videos to updating + max_backlog: Option<u32>, + + #[arg(short, long)] + /// The subscriptions to update (can be given multiple times) + subscriptions: Vec<String>, + }, + + /// Manipulate subscription + #[command(visible_alias = "subs")] + Subscriptions { + #[command(subcommand)] + cmd: SubscriptionCommand, + }, +} + +fn byte_parser(input: &str) -> Result<u64, anyhow::Error> { + Ok(input + .parse::<Bytes>() + .with_context(|| format!("Failed to parse '{}' as bytes!", input))? + .as_u64()) +} + +impl Default for Command { + fn default() -> Self { + Self::Select { + cmd: Some(SelectCommand::default()), + } + } +} + +#[derive(Subcommand, Clone, Debug)] +pub enum VideosCommand { + /// List the videos in the database + #[command(visible_alias = "ls")] + List { + /// An optional search query to limit the results + #[arg(action = ArgAction::Append)] + search_query: Option<String>, + + /// The number of videos to show + #[arg(short, long)] + limit: Option<usize>, + }, + + /// Get detailed information about a video + Info { + /// The short hash of the video + hash: LazyExtractorHash, + }, +} + +#[derive(Subcommand, Clone, Debug)] +pub enum SubscriptionCommand { + /// Subscribe to an URL + Add { + #[arg(short, long)] + /// The human readable name of the subscription + name: Option<String>, + + /// The URL to listen to + url: Url, + }, + + /// Unsubscribe from an URL + Remove { + /// The human readable name of the subscription + name: String, + }, + + /// Import a bunch of URLs as subscriptions. + Import { + /// The file containing the URLs. Will use Stdin otherwise. + file: Option<PathBuf>, + + /// Remove any previous subscriptions + #[arg(short, long)] + force: bool, + }, + /// Write all subscriptions in an format understood by `import` + Export {}, + + /// List all subscriptions + List {}, +} + +#[derive(Clone, Debug, Args)] +#[command(infer_subcommands = true)] +/// Mark the video given by the hash to be watched +pub struct SharedSelectionCommandArgs { + /// The ordering priority (higher means more at the top) + #[arg(short, long)] + pub priority: Option<i64>, + + /// The subtitles to download (e.g. 'en,de,sv') + #[arg(short = 'l', long)] + pub subtitle_langs: Option<String>, + + /// The speed to set mpv to + #[arg(short, long)] + pub speed: Option<f64>, + + /// The short extractor hash + pub hash: LazyExtractorHash, + + pub title: String, + + pub date: NaiveDate, + + pub publisher: String, + + pub duration: Duration, + + pub url: Url, +} + +#[derive(Subcommand, Clone, Debug)] +// NOTE: Keep this in sync with the [`constants::HELP_STR`] constant. 
<2024-08-20> +pub enum SelectCommand { + /// Open a `git rebase` like file to select the videos to watch (the default) + File { + /// Include done (watched, dropped) videos + #[arg(long, short)] + done: bool, + + /// Use the last selection file (useful if you've spend time on it and want to get it again) + #[arg(long, short, conflicts_with = "done")] + use_last_selection: bool, + }, + + /// Add a video to the database + #[command(visible_alias = "a")] + Add { urls: Vec<Url> }, + + /// Mark the video given by the hash to be watched + #[command(visible_alias = "w")] + Watch { + #[command(flatten)] + shared: SharedSelectionCommandArgs, + }, + + /// Mark the video given by the hash to be dropped + #[command(visible_alias = "d")] + Drop { + #[command(flatten)] + shared: SharedSelectionCommandArgs, + }, + + /// Mark the video given by the hash as already watched + #[command(visible_alias = "wd")] + Watched { + #[command(flatten)] + shared: SharedSelectionCommandArgs, + }, + + /// Open the video URL in Firefox's `timesinks.youtube` profile + #[command(visible_alias = "u")] + Url { + #[command(flatten)] + shared: SharedSelectionCommandArgs, + }, + + /// Reset the videos status to 'Pick' + #[command(visible_alias = "p")] + Pick { + #[command(flatten)] + shared: SharedSelectionCommandArgs, + }, +} +impl Default for SelectCommand { + fn default() -> Self { + Self::File { + done: false, + use_last_selection: false, + } + } +} + +#[derive(Subcommand, Clone, Debug)] +pub enum CheckCommand { + /// Check if the given info.json is deserializable + InfoJson { path: PathBuf }, + + /// Check if the given update info.json is deserializable + UpdateInfoJson { path: PathBuf }, +} + +#[derive(Subcommand, Clone, Copy, Debug)] +pub enum CacheCommand { + /// Invalidate all cache entries + Invalidate { + /// Also delete the cache path + #[arg(short, long)] + hard: bool, + }, + + /// Perform basic maintenance operations on the database. + /// This helps recovering from invalid db states after a crash (or force exit via CTRL+C). + /// + /// 1. Check every path for validity (removing all invalid cache entries) + /// 2. Reset all `status_change` bits of videos to false. + #[command(verbatim_doc_comment)] + Maintain { + /// Check every video (otherwise only the videos to be watched are checked) + #[arg(short, long)] + all: bool, + }, +} diff --git a/yt/src/comments/comment.rs b/yt/src/comments/comment.rs new file mode 100644 index 0000000..752c510 --- /dev/null +++ b/yt/src/comments/comment.rs @@ -0,0 +1,63 @@ +// yt - A fully featured command line YouTube client +// +// Copyright (C) 2024 Benedikt Peetz <benedikt.peetz@b-peetz.de> +// SPDX-License-Identifier: GPL-3.0-or-later +// +// This file is part of Yt. +// +// You should have received a copy of the License along with this program. +// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>. 
+ +use yt_dlp::wrapper::info_json::Comment; + +#[derive(Debug, Clone)] +pub struct CommentExt { + pub value: Comment, + pub replies: Vec<CommentExt>, +} + +#[derive(Debug, Default)] +pub struct Comments { + pub(super) vec: Vec<CommentExt>, +} + +impl Comments { + pub fn new() -> Self { + Self::default() + } + pub fn push(&mut self, value: CommentExt) { + self.vec.push(value); + } + pub fn get_mut(&mut self, key: &str) -> Option<&mut CommentExt> { + self.vec.iter_mut().filter(|c| c.value.id.id == key).last() + } + pub fn insert(&mut self, key: &str, value: CommentExt) { + let parent = self + .vec + .iter_mut() + .filter(|c| c.value.id.id == key) + .last() + .expect("One of these should exist"); + parent.push_reply(value); + } +} +impl CommentExt { + pub fn push_reply(&mut self, value: CommentExt) { + self.replies.push(value) + } + pub fn get_mut_reply(&mut self, key: &str) -> Option<&mut CommentExt> { + self.replies + .iter_mut() + .filter(|c| c.value.id.id == key) + .last() + } +} + +impl From<Comment> for CommentExt { + fn from(value: Comment) -> Self { + Self { + replies: vec![], + value, + } + } +} diff --git a/yt/src/comments/display.rs b/yt/src/comments/display.rs new file mode 100644 index 0000000..7000063 --- /dev/null +++ b/yt/src/comments/display.rs @@ -0,0 +1,117 @@ +// yt - A fully featured command line YouTube client +// +// Copyright (C) 2024 Benedikt Peetz <benedikt.peetz@b-peetz.de> +// SPDX-License-Identifier: GPL-3.0-or-later +// +// This file is part of Yt. +// +// You should have received a copy of the License along with this program. +// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>. + +use std::fmt::Write; + +use chrono::{Local, TimeZone}; +use chrono_humanize::{Accuracy, HumanTime, Tense}; + +use crate::comments::comment::CommentExt; + +use super::comment::Comments; + +impl Comments { + pub fn render(&self, color: bool) -> String { + self.render_help(color).expect("This should never fail.") + } + + fn render_help(&self, color: bool) -> Result<String, std::fmt::Error> { + let mut f = String::new(); + + macro_rules! c { + ($color_str:expr, $write:ident, $color:expr) => { + if $color { + $write.write_str(concat!("\x1b[", $color_str, "m"))? 
+ } + }; + } + + fn format( + comment: &CommentExt, + f: &mut String, + ident_count: u32, + color: bool, + ) -> std::fmt::Result { + let ident = &(0..ident_count).map(|_| " ").collect::<String>(); + let value = &comment.value; + + f.write_str(ident)?; + + if value.author_is_uploader { + c!("91;1", f, color); + } else { + c!("35", f, color); + } + + f.write_str(&value.author)?; + c!("0", f, color); + if value.edited || value.is_favorited { + f.write_str("[")?; + if value.edited { + f.write_str("")?; + } + if value.edited && value.is_favorited { + f.write_str(" ")?; + } + if value.is_favorited { + f.write_str("")?; + } + f.write_str("]")?; + } + + c!("36;1", f, color); + write!( + f, + " {}", + HumanTime::from( + Local + .timestamp_opt(value.timestamp, 0) + .single() + .expect("This should be valid") + ) + .to_text_en(Accuracy::Rough, Tense::Past) + )?; + c!("0", f, color); + + // c!("31;1", f); + // f.write_fmt(format_args!(" [{}]", comment.value.like_count))?; + // c!("0", f); + + f.write_str(":\n")?; + f.write_str(ident)?; + + f.write_str(&value.text.replace('\n', &format!("\n{}", ident)))?; + f.write_str("\n")?; + + if !comment.replies.is_empty() { + let mut children = comment.replies.clone(); + children.sort_by(|a, b| a.value.timestamp.cmp(&b.value.timestamp)); + + for child in children { + format(&child, f, ident_count + 4, color)?; + } + } else { + f.write_str("\n")?; + } + + Ok(()) + } + + if !&self.vec.is_empty() { + let mut children = self.vec.clone(); + children.sort_by(|a, b| b.value.like_count.cmp(&a.value.like_count)); + + for child in children { + format(&child, &mut f, 0, color)? + } + } + Ok(f) + } +} diff --git a/yt/src/comments/mod.rs b/yt/src/comments/mod.rs new file mode 100644 index 0000000..5fbc3fb --- /dev/null +++ b/yt/src/comments/mod.rs @@ -0,0 +1,178 @@ +// yt - A fully featured command line YouTube client +// +// Copyright (C) 2024 Benedikt Peetz <benedikt.peetz@b-peetz.de> +// SPDX-License-Identifier: GPL-3.0-or-later +// +// This file is part of Yt. +// +// You should have received a copy of the License along with this program. +// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>. + +use std::{ + io::Write, + mem, + process::{Command, Stdio}, +}; + +use anyhow::{bail, Context, Result}; +use comment::{CommentExt, Comments}; +use regex::Regex; +use yt_dlp::wrapper::info_json::{Comment, InfoJson, Parent}; + +use crate::{ + app::App, + storage::video_database::{ + getters::{get_currently_playing_video, get_video_info_json}, + Video, + }, +}; + +mod comment; +mod display; + +pub async fn get_comments(app: &App) -> Result<Comments> { + let currently_playing_video: Video = + if let Some(video) = get_currently_playing_video(app).await? { + video + } else { + bail!("Could not find a currently playing video!"); + }; + + let mut info_json: InfoJson = get_video_info_json(¤tly_playing_video) + .await? + .expect("A currently *playing* must be cached. 
And thus the info.json should be available"); + + let base_comments = mem::take(&mut info_json.comments).expect("A video should have comments"); + drop(info_json); + + let mut comments = Comments::new(); + base_comments.into_iter().for_each(|c| { + if let Parent::Id(id) = &c.parent { + comments.insert(&(id.clone()), CommentExt::from(c)); + } else { + comments.push(CommentExt::from(c)); + } + }); + + comments.vec.iter_mut().for_each(|comment| { + let replies = mem::take(&mut comment.replies); + let mut output_replies: Vec<CommentExt> = vec![]; + + let re = Regex::new(r"\u{200b}?(@[^\t\s]+)\u{200b}?").unwrap(); + for reply in replies { + if let Some(replyee_match) = re.captures(&reply.value.text){ + let full_match = replyee_match.get(0).expect("This always exists"); + let text = reply. + value. + text[0..full_match.start()] + .to_owned() + + + &reply + .value + .text[full_match.end()..]; + let text: &str = text.trim().trim_matches('\u{200b}'); + + let replyee = replyee_match.get(1).expect("This should exist").as_str(); + + + if let Some(parent) = output_replies + .iter_mut() + // .rev() + .flat_map(|com| &mut com.replies) + .flat_map(|com| &mut com.replies) + .flat_map(|com| &mut com.replies) + .filter(|com| com.value.author == replyee) + .last() + { + parent.replies.push(CommentExt::from(Comment { + text: text.to_owned(), + ..reply.value + })) + } else if let Some(parent) = output_replies + .iter_mut() + // .rev() + .flat_map(|com| &mut com.replies) + .flat_map(|com| &mut com.replies) + .filter(|com| com.value.author == replyee) + .last() + { + parent.replies.push(CommentExt::from(Comment { + text: text.to_owned(), + ..reply.value + })) + } else if let Some(parent) = output_replies + .iter_mut() + // .rev() + .flat_map(|com| &mut com.replies) + .filter(|com| com.value.author == replyee) + .last() + { + parent.replies.push(CommentExt::from(Comment { + text: text.to_owned(), + ..reply.value + })) + } else if let Some(parent) = output_replies.iter_mut() + // .rev() + .filter(|com| com.value.author == replyee) + .last() + { + parent.replies.push(CommentExt::from(Comment { + text: text.to_owned(), + ..reply.value + })) + } else { + eprintln!( + "Failed to find a parent for ('{}') both directly and via replies! 
The reply text was:\n'{}'\n", + replyee, + reply.value.text + ); + output_replies.push(reply); + } + } else { + output_replies.push(reply); + } + } + comment.replies = output_replies; + }); + + Ok(comments) +} + +pub async fn comments(app: &App) -> Result<()> { + let comments = get_comments(app).await?; + + let mut less = Command::new("less") + .args(["--raw-control-chars"]) + .stdin(Stdio::piped()) + .stderr(Stdio::inherit()) + .spawn() + .context("Failed to run less")?; + + let mut child = Command::new("fmt") + .args(["--uniform-spacing", "--split-only", "--width=90"]) + .stdin(Stdio::piped()) + .stderr(Stdio::inherit()) + .stdout(less.stdin.take().expect("Should be open")) + .spawn() + .context("Failed to run fmt")?; + + let mut stdin = child.stdin.take().context("Failed to open stdin")?; + std::thread::spawn(move || { + stdin + .write_all(comments.render(true).as_bytes()) + .expect("Should be able to write to stdin of fmt"); + }); + + let _ = less.wait().context("Failed to await less")?; + + Ok(()) +} + +#[cfg(test)] +mod test { + #[test] + fn test_string_replacement() { + let s = "A \n\nB\n\nC".to_owned(); + assert_eq!("A \n \n B\n \n C", s.replace('\n', "\n ")) + } +} diff --git a/yt/src/config/default.rs b/yt/src/config/default.rs new file mode 100644 index 0000000..59063f5 --- /dev/null +++ b/yt/src/config/default.rs @@ -0,0 +1,102 @@ +// yt - A fully featured command line YouTube client +// +// Copyright (C) 2024 Benedikt Peetz <benedikt.peetz@b-peetz.de> +// SPDX-License-Identifier: GPL-3.0-or-later +// +// This file is part of Yt. +// +// You should have received a copy of the License along with this program. +// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>. + +use std::path::PathBuf; + +use anyhow::{Context, Result}; + +fn get_runtime_path(name: &'static str) -> Result<PathBuf> { + let xdg_dirs = xdg::BaseDirectories::with_prefix(PREFIX)?; + xdg_dirs + .place_runtime_file(name) + .with_context(|| format!("Failed to place runtime file: '{}'", name)) +} +fn get_data_path(name: &'static str) -> Result<PathBuf> { + let xdg_dirs = xdg::BaseDirectories::with_prefix(PREFIX)?; + xdg_dirs + .place_data_file(name) + .with_context(|| format!("Failed to place data file: '{}'", name)) +} +fn get_config_path(name: &'static str) -> Result<PathBuf> { + let xdg_dirs = xdg::BaseDirectories::with_prefix(PREFIX)?; + xdg_dirs + .place_config_file(name) + .with_context(|| format!("Failed to place config file: '{}'", name)) +} + +pub(super) fn create_path(path: PathBuf) -> Result<PathBuf> { + if !path.exists() { + if let Some(parent) = path.parent() { + std::fs::create_dir_all(parent) + .with_context(|| format!("Failed to create the '{}' directory", path.display()))? 
+ } + } + + Ok(path) +} + +pub const PREFIX: &str = "yt"; + +pub mod select { + pub fn playback_speed() -> f64 { + 2.7 + } + pub fn subtitle_langs() -> &'static str { + "" + } +} + +pub mod watch { + pub fn local_comments_length() -> usize { + 1000 + } +} + +pub mod update { + pub fn max_backlog() -> u32 { + 20 + } +} + +pub mod paths { + use std::{env::temp_dir, path::PathBuf}; + + use anyhow::Result; + + use super::{create_path, get_config_path, get_data_path, get_runtime_path, PREFIX}; + + // We download to the temp dir to avoid taxing the disk + pub fn download_dir() -> Result<PathBuf> { + let temp_dir = temp_dir(); + + create_path(temp_dir.join(PREFIX)) + } + pub fn mpv_config_path() -> Result<PathBuf> { + get_config_path("mpv.conf") + } + pub fn mpv_input_path() -> Result<PathBuf> { + get_config_path("mpv.input.conf") + } + pub fn database_path() -> Result<PathBuf> { + get_data_path("videos.sqlite") + } + pub fn config_path() -> Result<PathBuf> { + get_config_path("config.toml") + } + pub fn last_selection_path() -> Result<PathBuf> { + get_runtime_path("selected.yts") + } +} + +pub mod download { + pub fn max_cache_size() -> &'static str { + "3 GiB" + } +} diff --git a/yt/src/config/definitions.rs b/yt/src/config/definitions.rs new file mode 100644 index 0000000..d37e6da --- /dev/null +++ b/yt/src/config/definitions.rs @@ -0,0 +1,59 @@ +// yt - A fully featured command line YouTube client +// +// Copyright (C) 2024 Benedikt Peetz <benedikt.peetz@b-peetz.de> +// SPDX-License-Identifier: GPL-3.0-or-later +// +// This file is part of Yt. +// +// You should have received a copy of the License along with this program. +// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>. + +use std::path::PathBuf; + +use serde::Deserialize; + +#[derive(Debug, Deserialize, PartialEq)] +#[serde(deny_unknown_fields)] +pub struct ConfigFile { + pub select: Option<SelectConfig>, + pub watch: Option<WatchConfig>, + pub paths: Option<PathsConfig>, + pub download: Option<DownloadConfig>, + pub update: Option<UpdateConfig>, +} + +#[derive(Debug, Deserialize, PartialEq, Clone, Copy)] +#[serde(deny_unknown_fields)] +pub struct UpdateConfig { + pub max_backlog: Option<u32>, +} + +#[derive(Debug, Deserialize, PartialEq, Clone)] +#[serde(deny_unknown_fields)] +pub struct DownloadConfig { + /// This will then be converted to an u64 + pub max_cache_size: Option<String>, +} + +#[derive(Debug, Deserialize, PartialEq, Clone)] +#[serde(deny_unknown_fields)] +pub struct SelectConfig { + pub playback_speed: Option<f64>, + pub subtitle_langs: Option<String>, +} + +#[derive(Debug, Deserialize, PartialEq, Clone, Copy)] +#[serde(deny_unknown_fields)] +pub struct WatchConfig { + pub local_comments_length: Option<usize>, +} + +#[derive(Debug, Deserialize, PartialEq, Clone)] +#[serde(deny_unknown_fields)] +pub struct PathsConfig { + pub download_dir: Option<PathBuf>, + pub mpv_config_path: Option<PathBuf>, + pub mpv_input_path: Option<PathBuf>, + pub database_path: Option<PathBuf>, + pub last_selection_path: Option<PathBuf>, +} diff --git a/yt/src/config/file_system.rs b/yt/src/config/file_system.rs new file mode 100644 index 0000000..5751583 --- /dev/null +++ b/yt/src/config/file_system.rs @@ -0,0 +1,112 @@ +// yt - A fully featured command line YouTube client +// +// Copyright (C) 2024 Benedikt Peetz <benedikt.peetz@b-peetz.de> +// SPDX-License-Identifier: GPL-3.0-or-later +// +// This file is part of Yt. +// +// You should have received a copy of the License along with this program. 
+// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>. + +use crate::config::{DownloadConfig, PathsConfig, SelectConfig, WatchConfig}; + +use super::{ + default::{create_path, download, paths, select, update, watch}, + Config, UpdateConfig, +}; + +use std::{fs::read_to_string, path::PathBuf}; + +use anyhow::{Context, Result}; +use bytes::Bytes; + +macro_rules! get { + ($default:path, $config:expr, $key_one:ident, $($keys:ident),*) => { + { + let maybe_value = get!{@option $config, $key_one, $($keys),*}; + if let Some(value) = maybe_value { + value + } else { + $default().to_owned() + } + } + }; + + (@option $config:expr, $key_one:ident, $($keys:ident),*) => { + if let Some(key) = $config.$key_one.clone() { + get!{@option key, $($keys),*} + } else { + None + } + }; + (@option $config:expr, $key_one:ident) => { + $config.$key_one + }; + + (@path_if_none $config:expr, $option_default:expr, $default:path, $key_one:ident, $($keys:ident),*) => { + { + let maybe_download_dir: Option<PathBuf> = + get! {@option $config, $key_one, $($keys),*}; + + let down_dir = if let Some(dir) = maybe_download_dir { + PathBuf::from(dir) + } else { + if let Some(path) = $option_default { + path + } else { + $default() + .with_context(|| format!("Failed to get default path for: '{}.{}'", stringify!($key_one), stringify!($($keys),*)))? + } + }; + create_path(down_dir)? + } + }; + (@path $config:expr, $default:path, $key_one:ident, $($keys:ident),*) => { + get! {@path_if_none $config, None, $default, $key_one, $($keys),*} + }; +} + +impl Config { + pub fn from_config_file( + db_path: Option<PathBuf>, + config_path: Option<PathBuf>, + ) -> Result<Self> { + let config_file_path = config_path + .map(Ok) + .unwrap_or_else(|| -> Result<_> { paths::config_path() })?; + + let config: super::definitions::ConfigFile = + toml::from_str(&read_to_string(config_file_path).unwrap_or("".to_owned())) + .context("Failed to parse the config file as toml")?; + + Ok(Self { + select: SelectConfig { + playback_speed: get! {select::playback_speed, config, select, playback_speed}, + subtitle_langs: get! {select::subtitle_langs, config, select, subtitle_langs}, + }, + watch: WatchConfig { + local_comments_length: get! {watch::local_comments_length, config, watch, local_comments_length}, + }, + update: UpdateConfig { + max_backlog: get! {update::max_backlog, config, update, max_backlog}, + }, + paths: PathsConfig { + download_dir: get! {@path config, paths::download_dir, paths, download_dir}, + mpv_config_path: get! {@path config, paths::mpv_config_path, paths, mpv_config_path}, + mpv_input_path: get! {@path config, paths::mpv_input_path, paths, mpv_input_path}, + database_path: get! {@path_if_none config, db_path, paths::database_path, paths, database_path}, + last_selection_path: get! {@path config, paths::last_selection_path, paths, last_selection_path}, + }, + download: DownloadConfig { + max_cache_size: { + let bytes_str: String = + get! {download::max_cache_size, config, download, max_cache_size}; + let number: Bytes = bytes_str + .parse() + .context("Failed to parse max_cache_size")?; + number + }, + }, + }) + } +} diff --git a/yt/src/config/mod.rs b/yt/src/config/mod.rs new file mode 100644 index 0000000..ea40055 --- /dev/null +++ b/yt/src/config/mod.rs @@ -0,0 +1,62 @@ +// yt - A fully featured command line YouTube client +// +// Copyright (C) 2024 Benedikt Peetz <benedikt.peetz@b-peetz.de> +// SPDX-License-Identifier: GPL-3.0-or-later +// +// This file is part of Yt. 
+// +// You should have received a copy of the License along with this program. +// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>. + +use std::path::PathBuf; + +use bytes::Bytes; +use serde::Serialize; + +mod default; +mod definitions; +pub mod file_system; + +#[derive(Serialize)] +pub struct Config { + pub select: SelectConfig, + pub watch: WatchConfig, + pub paths: PathsConfig, + pub download: DownloadConfig, + pub update: UpdateConfig, +} +#[derive(Serialize)] +pub struct UpdateConfig { + pub max_backlog: u32, +} +#[derive(Serialize)] +pub struct DownloadConfig { + pub max_cache_size: Bytes, +} +#[derive(Serialize)] +pub struct SelectConfig { + pub playback_speed: f64, + pub subtitle_langs: String, +} +#[derive(Serialize)] +pub struct WatchConfig { + pub local_comments_length: usize, +} +#[derive(Serialize)] +pub struct PathsConfig { + pub download_dir: PathBuf, + pub mpv_config_path: PathBuf, + pub mpv_input_path: PathBuf, + pub database_path: PathBuf, + pub last_selection_path: PathBuf, +} + +// pub fn status_path() -> anyhow::Result<PathBuf> { +// const STATUS_PATH: &str = "running.info.json"; +// get_runtime_path(STATUS_PATH) +// } + +// pub fn subscriptions() -> anyhow::Result<PathBuf> { +// const SUBSCRIPTIONS: &str = "subscriptions.json"; +// get_data_path(SUBSCRIPTIONS) +// } diff --git a/yt/src/constants.rs b/yt/src/constants.rs new file mode 100644 index 0000000..54cae89 --- /dev/null +++ b/yt/src/constants.rs @@ -0,0 +1,11 @@ +// yt - A fully featured command line YouTube client +// +// Copyright (C) 2024 Benedikt Peetz <benedikt.peetz@b-peetz.de> +// SPDX-License-Identifier: GPL-3.0-or-later +// +// This file is part of Yt. +// +// You should have received a copy of the License along with this program. +// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>. + +pub const HELP_STR: &str = include_str!("./select/selection_file/help.str"); diff --git a/yt/src/download/download_options.rs b/yt/src/download/download_options.rs new file mode 100644 index 0000000..e93170a --- /dev/null +++ b/yt/src/download/download_options.rs @@ -0,0 +1,121 @@ +// yt - A fully featured command line YouTube client +// +// Copyright (C) 2024 Benedikt Peetz <benedikt.peetz@b-peetz.de> +// SPDX-License-Identifier: GPL-3.0-or-later +// +// This file is part of Yt. +// +// You should have received a copy of the License along with this program. +// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>. 
+ +use serde_json::{json, Value}; + +use crate::{app::App, storage::video_database::YtDlpOptions}; + +// { +// "ratelimit": conf.ratelimit if conf.ratelimit > 0 else None, +// "retries": conf.retries, +// "merge_output_format": conf.merge_output_format, +// "restrictfilenames": conf.restrict_filenames, +// "ignoreerrors": False, +// "postprocessors": [{"key": "FFmpegMetadata"}], +// "logger": _ytdl_logger +// } + +pub fn download_opts( + app: &App, + additional_opts: YtDlpOptions, +) -> serde_json::Map<String, serde_json::Value> { + match json!({ + "extract_flat": false, + "extractor_args": { + "youtube": { + "comment_sort": [ + "top" + ], + "max_comments": [ + "150", + "all", + "100" + ] + } + }, + "ffmpeg_location": env!("FFMPEG_LOCATION"), + "format": "bestvideo[height<=?1080]+bestaudio/best", + "fragment_retries": 10, + "getcomments": true, + "ignoreerrors": false, + "retries": 10, + + "writeinfojson": true, + "writeannotations": true, + "writesubtitles": true, + "writeautomaticsub": true, + + "outtmpl": { + "default": app.config.paths.download_dir.join("%(channel)s/%(title)s.%(ext)s"), + "chapter": "%(title)s - %(section_number)03d %(section_title)s [%(id)s].%(ext)s" + }, + "compat_opts": {}, + "forceprint": {}, + "print_to_file": {}, + "windowsfilenames": false, + "restrictfilenames": false, + "trim_file_names": false, + "postprocessors": [ + { + "api": "https://sponsor.ajay.app", + "categories": [ + "interaction", + "intro", + "music_offtopic", + "sponsor", + "outro", + "poi_highlight", + "preview", + "selfpromo", + "filler", + "chapter" + ], + "key": "SponsorBlock", + "when": "after_filter" + }, + { + "force_keyframes": false, + "key": "ModifyChapters", + "remove_chapters_patterns": [], + "remove_ranges": [], + "remove_sponsor_segments": [ + "sponsor" + ], + "sponsorblock_chapter_title": "[SponsorBlock]: %(category_names)l" + }, + { + "add_chapters": true, + "add_infojson": null, + "add_metadata": false, + "key": "FFmpegMetadata" + }, + { + "key": "FFmpegConcat", + "only_multi_video": true, + "when": "playlist" + } + ] + }) { + serde_json::Value::Object(mut obj) => { + obj.insert( + "subtitleslangs".to_owned(), + serde_json::Value::Array( + additional_opts + .subtitle_langs + .split(',') + .map(|val| Value::String(val.to_owned())) + .collect::<Vec<_>>(), + ), + ); + obj + } + _ => unreachable!("This is an object"), + } +} diff --git a/yt/src/download/mod.rs b/yt/src/download/mod.rs new file mode 100644 index 0000000..56910f9 --- /dev/null +++ b/yt/src/download/mod.rs @@ -0,0 +1,303 @@ +// yt - A fully featured command line YouTube client +// +// Copyright (C) 2024 Benedikt Peetz <benedikt.peetz@b-peetz.de> +// SPDX-License-Identifier: GPL-3.0-or-later +// +// This file is part of Yt. +// +// You should have received a copy of the License along with this program. +// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>. 
+ +use std::{collections::HashMap, str::FromStr, sync::Arc, time::Duration}; + +use crate::{ + app::App, + download::download_options::download_opts, + storage::video_database::{ + downloader::{get_next_uncached_video, set_video_cache_path}, + extractor_hash::ExtractorHash, + getters::get_video_yt_dlp_opts, + Video, YtDlpOptions, + }, +}; + +use anyhow::{bail, Context, Result}; +use bytes::Bytes; +use futures::{future::BoxFuture, FutureExt}; +use log::{debug, error, info, warn}; +use tokio::{fs, task::JoinHandle, time}; + +pub mod download_options; + +#[derive(Debug)] +pub struct CurrentDownload { + task_handle: JoinHandle<Result<()>>, + extractor_hash: ExtractorHash, +} + +impl CurrentDownload { + fn new_from_video(app: Arc<App>, video: Video) -> Self { + let extractor_hash = video.extractor_hash.clone(); + + let task_handle = tokio::spawn(async move { + Downloader::actually_cache_video(&app, &video) + .await + .with_context(|| format!("Failed to cache video: '{}'", video.title))?; + Ok(()) + }); + + Self { + task_handle, + extractor_hash, + } + } +} + +enum CacheSizeCheck { + /// The video can be downloaded + Fits, + + /// The video and the current cache size together would exceed the size + TooLarge, + + /// The video would not even fit into the empty cache + ExceedsMaxCacheSize, +} + +pub struct Downloader { + current_download: Option<CurrentDownload>, + video_size_cache: HashMap<ExtractorHash, u64>, + printed_warning: bool, + cached_cache_allocation: Option<u64>, +} + +impl Default for Downloader { + fn default() -> Self { + Self::new() + } +} + +impl Downloader { + pub fn new() -> Self { + Self { + current_download: None, + video_size_cache: HashMap::new(), + printed_warning: false, + cached_cache_allocation: None, + } + } + + /// Check if enough cache is available. Will wait for 10s if it's not. + async fn is_enough_cache_available( + &mut self, + app: &App, + max_cache_size: u64, + next_video: &Video, + ) -> Result<CacheSizeCheck> { + if let Some(cdownload) = &self.current_download { + if cdownload.extractor_hash == next_video.extractor_hash { + // If the video is already being downloaded it will always fit. Otherwise the + // download would not have been started. + return Ok(CacheSizeCheck::Fits); + } + } + let cache_allocation = Self::get_current_cache_allocation(app).await?; + let video_size = self.get_approx_video_size(app, next_video).await?; + + if video_size >= max_cache_size { + error!( + "The video '{}' ({}) exceeds the maximum cache size ({})! \ + Please set a bigger maximum (`--max-cache-size`) or skip it.", + next_video.title, + Bytes::new(video_size), + Bytes::new(max_cache_size) + ); + + return Ok(CacheSizeCheck::ExceedsMaxCacheSize); + } + + if cache_allocation + video_size >= max_cache_size { + if !self.printed_warning { + warn!( + "Can't download video: '{}' ({}) as it's too large for the cache ({} of {} allocated). 
\ + Waiting for cache size reduction..", + next_video.title, Bytes::new(video_size), Bytes::new(cache_allocation), Bytes::new(max_cache_size) + ); + self.printed_warning = true; + self.cached_cache_allocation = Some(cache_allocation); + } + if let Some(cca) = self.cached_cache_allocation { + if cca != cache_allocation { + warn!( + "Current cache size has changed, it's now: '{}'", + Bytes::new(cache_allocation) + ); + self.cached_cache_allocation = Some(cache_allocation); + } + } else { + info!( + "Current cache size allocation: '{}'", + Bytes::new(cache_allocation) + ); + self.cached_cache_allocation = Some(cache_allocation); + } + + // Wait and hope, that a large video is deleted from the cache. + time::sleep(Duration::from_secs(10)).await; + Ok(CacheSizeCheck::TooLarge) + } else { + self.printed_warning = false; + Ok(CacheSizeCheck::Fits) + } + } + + /// The entry point to the Downloader. + /// This Downloader will periodically check if the database has changed, and then also + /// change which videos it downloads. + /// This will run, until the database doesn't contain any watchable videos + pub async fn consume(&mut self, app: Arc<App>, max_cache_size: u64) -> Result<()> { + while let Some(next_video) = get_next_uncached_video(&app).await? { + match self + .is_enough_cache_available(&app, max_cache_size, &next_video) + .await? + { + CacheSizeCheck::Fits => (), + CacheSizeCheck::TooLarge => continue, + CacheSizeCheck::ExceedsMaxCacheSize => bail!("Giving up."), + }; + + if self.current_download.is_some() { + let current_download = self.current_download.take().expect("Is Some"); + + if current_download.task_handle.is_finished() { + current_download.task_handle.await??; + continue; + } + + if next_video.extractor_hash != current_download.extractor_hash { + info!( + "Noticed, that the next video is not the video being downloaded, replacing it ('{}' vs. '{}')!", + next_video.extractor_hash.into_short_hash(&app).await?, current_download.extractor_hash.into_short_hash(&app).await? + ); + + // Replace the currently downloading video + current_download.task_handle.abort(); + + let new_current_download = + CurrentDownload::new_from_video(Arc::clone(&app), next_video); + + self.current_download = Some(new_current_download); + } else { + // Reset the taken value + self.current_download = Some(current_download); + } + } else { + info!( + "No video is being downloaded right now, setting it to '{}'", + next_video.title + ); + let new_current_download = + CurrentDownload::new_from_video(Arc::clone(&app), next_video); + self.current_download = Some(new_current_download); + } + + time::sleep(Duration::new(1, 0)).await; + } + + info!("Finished downloading!"); + Ok(()) + } + + pub async fn get_current_cache_allocation(app: &App) -> Result<u64> { + fn dir_size(mut dir: fs::ReadDir) -> BoxFuture<'static, Result<u64>> { + async move { + let mut acc = 0; + while let Some(entry) = dir.next_entry().await? { + let size = match entry.metadata().await? { + data if data.is_dir() => { + let path = entry.path(); + let read_dir = fs::read_dir(path).await?; + + dir_size(read_dir).await? 
+ } + data => data.len(), + }; + acc += size; + } + Ok(acc) + } + .boxed() + } + + dir_size(fs::read_dir(&app.config.paths.download_dir).await?).await + } + + async fn get_approx_video_size(&mut self, app: &App, video: &Video) -> Result<u64> { + if let Some(value) = self.video_size_cache.get(&video.extractor_hash) { + Ok(*value) + } else { + // the subtitle file size should be negligible + let add_opts = YtDlpOptions { + subtitle_langs: "".to_owned(), + }; + let opts = &download_opts(app, add_opts); + + let result = yt_dlp::extract_info(opts, &video.url, false, true) + .await + .with_context(|| { + format!("Failed to extract video information: '{}'", video.title) + })?; + + let size = if let Some(val) = result.filesize { + val + } else if let Some(val) = result.filesize_approx { + val + } else if result.duration.is_some() && result.tbr.is_some() { + let duration = result.duration.expect("Is some").ceil() as u64; + + // TODO: yt_dlp gets this from the format + let tbr = result.tbr.expect("Is Some").ceil() as u64; + + duration * tbr * (1000 / 8) + } else { + let hardcoded_default = Bytes::from_str("250 MiB").expect("This is hardcoded"); + error!( + "Failed to find a filesize for video: '{}' (Using hardcoded value of {})", + video.title, hardcoded_default + ); + hardcoded_default.as_u64() + }; + + assert_eq!( + self.video_size_cache + .insert(video.extractor_hash.clone(), size), + None + ); + + Ok(size) + } + } + + async fn actually_cache_video(app: &App, video: &Video) -> Result<()> { + debug!("Download started: {}", &video.title); + + let addional_opts = get_video_yt_dlp_opts(app, &video.extractor_hash).await?; + + let result = yt_dlp::download(&[video.url.clone()], &download_opts(app, addional_opts)) + .await + .with_context(|| format!("Failed to download video: '{}'", video.title))?; + + assert_eq!(result.len(), 1); + let result = &result[0]; + + set_video_cache_path(app, &video.extractor_hash, Some(result)).await?; + + info!( + "Video '{}' was downlaoded to path: {}", + video.title, + result.display() + ); + + Ok(()) + } +} diff --git a/yt/src/main.rs b/yt/src/main.rs new file mode 100644 index 0000000..37283a1 --- /dev/null +++ b/yt/src/main.rs @@ -0,0 +1,252 @@ +// yt - A fully featured command line YouTube client +// +// Copyright (C) 2024 Benedikt Peetz <benedikt.peetz@b-peetz.de> +// SPDX-License-Identifier: GPL-3.0-or-later +// +// This file is part of Yt. +// +// You should have received a copy of the License along with this program. +// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>. 
+ +use std::{collections::HashMap, fs, sync::Arc}; + +use anyhow::{bail, Context, Result}; +use app::App; +use bytes::Bytes; +use cache::invalidate; +use clap::Parser; +use cli::{CacheCommand, CheckCommand, SelectCommand, SubscriptionCommand, VideosCommand}; +use config::Config; +use log::info; +use select::cmds::handle_select_cmd; +use storage::video_database::getters::get_video_by_hash; +use tokio::{ + fs::File, + io::{stdin, BufReader}, + task::JoinHandle, +}; +use url::Url; +use videos::display::format_video::FormatVideo; +use yt_dlp::wrapper::info_json::InfoJson; + +use crate::{cli::Command, storage::subscriptions::get_subscriptions}; + +pub mod app; +pub mod cli; + +pub mod cache; +pub mod comments; +pub mod config; +pub mod constants; +pub mod download; +pub mod select; +pub mod status; +pub mod storage; +pub mod subscribe; +pub mod update; +pub mod videos; +pub mod watch; + +#[tokio::main] +async fn main() -> Result<()> { + let args = cli::CliArgs::parse(); + + // The default verbosity is 1 (Warn) + let verbosity: u8 = args.verbosity + 1; + + stderrlog::new() + .module(module_path!()) + .modules(&["yt_dlp".to_owned(), "libmpv2".to_owned()]) + .quiet(args.quiet) + .show_module_names(false) + .color(stderrlog::ColorChoice::Auto) + .verbosity(verbosity as usize) + .timestamp(stderrlog::Timestamp::Off) + .init() + .expect("Let's just hope that this does not panic"); + + info!("Using verbosity level: '{} ({})'", verbosity, { + match verbosity { + 0 => "Error", + 1 => "Warn", + 2 => "Info", + 3 => "Debug", + 4.. => "Trace", + } + }); + + let app = { + let config = Config::from_config_file(args.db_path, args.config_path)?; + App::new(config).await? + }; + + match args.command.unwrap_or(Command::default()) { + Command::Download { + force, + max_cache_size, + } => { + let max_cache_size = + max_cache_size.unwrap_or(app.config.download.max_cache_size.as_u64()); + info!("Max cache size: '{}'", Bytes::new(max_cache_size)); + + if force { + invalidate(&app, true).await?; + } + + download::Downloader::new() + .consume(Arc::new(app), max_cache_size) + .await?; + } + Command::Select { cmd } => { + let cmd = cmd.unwrap_or(SelectCommand::default()); + + match cmd { + SelectCommand::File { + done, + use_last_selection, + } => select::select(&app, done, use_last_selection).await?, + _ => handle_select_cmd(&app, cmd, None).await?, + } + } + Command::Sedowa {} => { + select::select(&app, false, false).await?; + + let arc_app = Arc::new(app); + dowa(arc_app).await?; + } + Command::Dowa {} => { + let arc_app = Arc::new(app); + dowa(arc_app).await?; + } + Command::Videos { cmd } => match cmd { + VideosCommand::List { + search_query, + limit, + } => { + videos::query(&app, limit, search_query) + .await + .context("Failed to query videos")?; + } + VideosCommand::Info { hash } => { + let video = get_video_by_hash(&app, &hash.realize(&app).await?).await?; + + print!( + "{}", + (&video + .to_formatted_video(&app) + .await + .context("Failed to format video")? 
+ .colorize()) + .to_info_display() + ); + } + }, + Command::Update { + max_backlog, + subscriptions, + } => { + let all_subs = get_subscriptions(&app).await?; + + for sub in &subscriptions { + if !all_subs.0.contains_key(sub) { + bail!( + "Your specified subscription to update '{}' is not a subscription!", + sub + ) + } + } + + let max_backlog = max_backlog.unwrap_or(app.config.update.max_backlog); + + update::update(&app, max_backlog, subscriptions, verbosity).await?; + } + Command::Subscriptions { cmd } => match cmd { + SubscriptionCommand::Add { name, url } => { + subscribe::subscribe(&app, name, url) + .await + .context("Failed to add a subscription")?; + } + SubscriptionCommand::Remove { name } => { + subscribe::unsubscribe(&app, name) + .await + .context("Failed to remove a subscription")?; + } + SubscriptionCommand::List {} => { + let all_subs = get_subscriptions(&app).await?; + + for (key, val) in all_subs.0 { + println!("{}: '{}'", key, val.url); + } + } + SubscriptionCommand::Export {} => { + let all_subs = get_subscriptions(&app).await?; + for val in all_subs.0.values() { + println!("{}", val.url); + } + } + SubscriptionCommand::Import { file, force } => { + if let Some(file) = file { + let f = File::open(file).await?; + + subscribe::import(&app, BufReader::new(f), force).await? + } else { + subscribe::import(&app, BufReader::new(stdin()), force).await? + }; + } + }, + + Command::Watch {} => watch::watch(&app).await?, + + Command::Status {} => status::show(&app).await?, + Command::Config {} => status::config(&app)?, + + Command::Database { command } => match command { + CacheCommand::Invalidate { hard } => cache::invalidate(&app, hard).await?, + CacheCommand::Maintain { all } => cache::maintain(&app, all).await?, + }, + + Command::Check { command } => match command { + CheckCommand::InfoJson { path } => { + let string = fs::read_to_string(&path) + .with_context(|| format!("Failed to read '{}' to string!", path.display()))?; + + let _: InfoJson = + serde_json::from_str(&string).context("Failed to deserialize value")?; + } + CheckCommand::UpdateInfoJson { path } => { + let string = fs::read_to_string(&path) + .with_context(|| format!("Failed to read '{}' to string!", path.display()))?; + + let _: HashMap<Url, InfoJson> = + serde_json::from_str(&string).context("Failed to deserialize value")?; + } + }, + Command::Comments {} => { + comments::comments(&app).await?; + } + Command::Description {} => { + todo!() + // description::description(&app).await?; + } + } + + Ok(()) +} + +async fn dowa(arc_app: Arc<App>) -> Result<()> { + let max_cache_size = arc_app.config.download.max_cache_size; + info!("Max cache size: '{}'", max_cache_size); + + let arc_app_clone = Arc::clone(&arc_app); + let download: JoinHandle<Result<()>> = tokio::spawn(async move { + download::Downloader::new() + .consume(arc_app_clone, max_cache_size.as_u64()) + .await?; + + Ok(()) + }); + + watch::watch(&arc_app).await?; + download.await??; + Ok(()) +} diff --git a/yt/src/select/cmds.rs b/yt/src/select/cmds.rs new file mode 100644 index 0000000..6e71607 --- /dev/null +++ b/yt/src/select/cmds.rs @@ -0,0 +1,147 @@ +// yt - A fully featured command line YouTube client +// +// Copyright (C) 2024 Benedikt Peetz <benedikt.peetz@b-peetz.de> +// SPDX-License-Identifier: GPL-3.0-or-later +// +// This file is part of Yt. +// +// You should have received a copy of the License along with this program. +// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>. 
+ +use crate::{ + app::App, + cli::{SelectCommand, SharedSelectionCommandArgs}, + download::download_options::download_opts, + storage::video_database::{ + self, + getters::get_video_by_hash, + setters::{add_video, set_video_options, set_video_status}, + VideoOptions, VideoStatus, + }, + update::video_entry_to_video, + videos::display::format_video::FormatVideo, +}; + +use anyhow::{bail, Context, Result}; +use futures::future::join_all; +use yt_dlp::wrapper::info_json::InfoType; + +pub async fn handle_select_cmd( + app: &App, + cmd: SelectCommand, + line_number: Option<i64>, +) -> Result<()> { + match cmd { + SelectCommand::Pick { shared } => { + handle_status_change(app, shared, line_number, VideoStatus::Pick).await?; + } + SelectCommand::Drop { shared } => { + handle_status_change(app, shared, line_number, VideoStatus::Drop).await?; + } + SelectCommand::Watched { shared } => { + handle_status_change(app, shared, line_number, VideoStatus::Watched).await?; + } + SelectCommand::Add { urls } => { + for url in urls { + let opts = download_opts( + &app, + video_database::YtDlpOptions { + subtitle_langs: "".to_owned(), + }, + ); + let entry = yt_dlp::extract_info(&opts, &url, false, true) + .await + .with_context(|| format!("Failed to fetch entry for url: '{}'", url))?; + + async fn add_entry( + app: &App, + entry: yt_dlp::wrapper::info_json::InfoJson, + ) -> Result<()> { + let video = video_entry_to_video(entry, None)?; + println!( + "{}", + (&video.to_formatted_video(app).await?.colorize()).to_line_display() + ); + add_video(app, video).await?; + + Ok(()) + } + + match entry._type { + Some(InfoType::Video) => { + add_entry(&app, entry).await?; + } + Some(InfoType::Playlist) => { + if let Some(mut entries) = entry.entries { + if !entries.is_empty() { + // Pre-warm the cache + add_entry(app, entries.remove(0)).await?; + + let futures: Vec<_> = entries + .into_iter() + .map(|entry| add_entry(&app, entry)) + .collect(); + + join_all(futures).await.into_iter().collect::<Result<_>>()?; + } + } else { + bail!("Your playlist does not seem to have any entries!") + } + } + other => bail!( + "Your URL should point to a video or a playlist, but points to a '{:#?}'", + other + ), + } + } + } + SelectCommand::Watch { shared } => { + let hash = shared.hash.clone().realize(app).await?; + + let video = get_video_by_hash(app, &hash).await?; + if video.cache_path.is_some() { + handle_status_change(app, shared, line_number, VideoStatus::Cached).await?; + } else { + handle_status_change(app, shared, line_number, VideoStatus::Watch).await?; + } + } + + SelectCommand::Url { shared } => { + let mut firefox = std::process::Command::new("firefox"); + firefox.args(["-P", "timesinks.youtube"]); + firefox.arg(shared.url.as_str()); + let _handle = firefox.spawn().context("Failed to run firefox")?; + } + SelectCommand::File { .. 
} => unreachable!("This should have been filtered out"), + } + Ok(()) +} + +async fn handle_status_change( + app: &App, + shared: SharedSelectionCommandArgs, + line_number: Option<i64>, + new_status: VideoStatus, +) -> Result<()> { + let hash = shared.hash.realize(app).await?; + let video_options = VideoOptions::new( + shared + .subtitle_langs + .unwrap_or(app.config.select.subtitle_langs.clone()), + shared.speed.unwrap_or(app.config.select.playback_speed), + ); + let priority = compute_priority(line_number, shared.priority); + + set_video_status(app, &hash, new_status, priority).await?; + set_video_options(app, &hash, &video_options).await?; + + Ok(()) +} + +fn compute_priority(line_number: Option<i64>, priority: Option<i64>) -> Option<i64> { + if let Some(pri) = priority { + Some(pri) + } else { + line_number + } +} diff --git a/yt/src/select/mod.rs b/yt/src/select/mod.rs new file mode 100644 index 0000000..ca7a203 --- /dev/null +++ b/yt/src/select/mod.rs @@ -0,0 +1,173 @@ +// yt - A fully featured command line YouTube client +// +// Copyright (C) 2024 Benedikt Peetz <benedikt.peetz@b-peetz.de> +// SPDX-License-Identifier: GPL-3.0-or-later +// +// This file is part of Yt. +// +// You should have received a copy of the License along with this program. +// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>. + +use std::{ + env::{self}, + fs, + io::{BufRead, Write}, + io::{BufReader, BufWriter}, +}; + +use crate::{ + app::App, + cli::CliArgs, + constants::HELP_STR, + storage::video_database::{getters::get_videos, VideoStatus}, + videos::display::format_video::FormatVideo, +}; + +use anyhow::{bail, Context, Result}; +use clap::Parser; +use cmds::handle_select_cmd; +use futures::future::join_all; +use selection_file::process_line; +use tempfile::Builder; +use tokio::process::Command; + +pub mod cmds; +pub mod selection_file; + +pub async fn select(app: &App, done: bool, use_last_selection: bool) -> Result<()> { + let temp_file = Builder::new() + .prefix("yt_video_select-") + .suffix(".yts") + .rand_bytes(6) + .tempfile() + .context("Failed to get tempfile")?; + + if use_last_selection { + fs::copy(&app.config.paths.last_selection_path, &temp_file)?; + } else { + let matching_videos = if done { + get_videos(app, VideoStatus::ALL, None).await? + } else { + get_videos( + app, + &[ + VideoStatus::Pick, + // + VideoStatus::Watch, + VideoStatus::Cached, + ], + None, + ) + .await? + }; + + // Warmup the cache for the display rendering of the videos. + // Otherwise the futures would all try to warm it up at the same time. 
+ if let Some(vid) = matching_videos.first() { + let _ = vid.to_formatted_video(app).await?; + } + + let mut edit_file = BufWriter::new(&temp_file); + + join_all( + matching_videos + .into_iter() + .map(|vid| async { vid.to_formatted_video_owned(app).await }) + .collect::<Vec<_>>(), + ) + .await + .into_iter() + .try_for_each(|line| -> Result<()> { + let formatted_line = (&line?).to_select_file_display(); + + edit_file + .write_all(formatted_line.as_bytes()) + .expect("This write should not fail"); + + Ok(()) + })?; + + edit_file.write_all(HELP_STR.as_bytes())?; + edit_file.flush().context("Failed to flush edit file")?; + }; + + { + let editor = env::var("EDITOR").unwrap_or("nvim".to_owned()); + + let mut nvim = Command::new(editor); + nvim.arg(temp_file.path()); + let status = nvim.status().await.context("Falied to run nvim")?; + if !status.success() { + bail!("nvim exited with error status: {}", status) + } + } + + let read_file = temp_file.reopen()?; + fs::copy(temp_file.path(), &app.config.paths.last_selection_path) + .context("Failed to persist selection file")?; + + let reader = BufReader::new(&read_file); + + let mut line_number = 0; + for line in reader.lines() { + let line = line.context("Failed to read a line")?; + + if let Some(line) = process_line(&line)? { + line_number -= 1; + + // debug!( + // "Parsed command: `{}`", + // line.iter() + // .map(|val| format!("\"{}\"", val)) + // .collect::<Vec<String>>() + // .join(" ") + // ); + + let arg_line = ["yt", "select"] + .into_iter() + .chain(line.iter().map(|val| val.as_str())); + + let args = CliArgs::parse_from(arg_line); + + let cmd = if let crate::cli::Command::Select { cmd } = + args.command.expect("This will be some") + { + cmd + } else { + unreachable!("This is checked in the `filter_line` function") + }; + + handle_select_cmd( + app, + cmd.expect("This value should always be some here"), + Some(line_number), + ) + .await? + } + } + + Ok(()) +} + +// // FIXME: There should be no reason why we need to re-run yt, just to get the help string. But I've +// // yet to find a way to do it with out the extra exec <2024-08-20> +// async fn get_help() -> Result<String> { +// let binary_name = current_exe()?; +// let cmd = Command::new(binary_name) +// .args(&["select", "--help"]) +// .output() +// .await?; +// +// assert_eq!(cmd.status.code(), Some(0)); +// +// let output = String::from_utf8(cmd.stdout).expect("Our help output was not utf8?"); +// +// let out = output +// .lines() +// .map(|line| format!("# {}\n", line)) +// .collect::<String>(); +// +// debug!("Returning help: '{}'", &out); +// +// Ok(out) +// } diff --git a/yt/src/select/selection_file/duration.rs b/yt/src/select/selection_file/duration.rs new file mode 100644 index 0000000..a38981c --- /dev/null +++ b/yt/src/select/selection_file/duration.rs @@ -0,0 +1,111 @@ +// yt - A fully featured command line YouTube client +// +// Copyright (C) 2024 Benedikt Peetz <benedikt.peetz@b-peetz.de> +// SPDX-License-Identifier: GPL-3.0-or-later +// +// This file is part of Yt. +// +// You should have received a copy of the License along with this program. +// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>. 
+ +use std::str::FromStr; + +use anyhow::{Context, Result}; + +#[derive(Copy, Clone, Debug)] +pub struct Duration { + time: u32, +} + +impl FromStr for Duration { + type Err = anyhow::Error; + + fn from_str(s: &str) -> Result<Self, Self::Err> { + fn parse_num(str: &str, suffix: char) -> Result<u32> { + str.strip_suffix(suffix) + .with_context(|| { + format!("Failed to strip suffix '{}' of number: '{}'", suffix, str) + })? + .parse::<u32>() + .with_context(|| format!("Failed to parse '{}'", suffix)) + } + + if s == "[No Duration]" { + return Ok(Self { time: 0 }); + } + + let buf: Vec<_> = s.split(' ').collect(); + + let hours; + let minutes; + let seconds; + + assert_eq!(buf.len(), 2, "Other lengths should not happen"); + + if buf[0].ends_with('h') { + hours = parse_num(buf[0], 'h')?; + minutes = parse_num(buf[1], 'm')?; + seconds = 0; + } else if buf[0].ends_with('m') { + hours = 0; + minutes = parse_num(buf[0], 'm')?; + seconds = parse_num(buf[1], 's')?; + } else { + unreachable!( + "The first part always ends with 'h' or 'm', but was: {:#?}", + buf + ) + } + + Ok(Self { + time: (hours * 60 * 60) + (minutes * 60) + seconds, + }) + } +} + +impl From<Option<f64>> for Duration { + fn from(value: Option<f64>) -> Self { + Self { + time: value.unwrap_or(0.0).ceil() as u32, + } + } +} + +impl std::fmt::Display for Duration { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> Result<(), std::fmt::Error> { + const SECOND: u32 = 1; + const MINUTE: u32 = 60 * SECOND; + const HOUR: u32 = 60 * MINUTE; + + let base_hour = self.time - (self.time % HOUR); + let base_min = (self.time % HOUR) - ((self.time % HOUR) % MINUTE); + let base_sec = (self.time % HOUR) % MINUTE; + + let h = base_hour / HOUR; + let m = base_min / MINUTE; + let s = base_sec / SECOND; + + if self.time == 0 { + write!(f, "[No Duration]") + } else if h > 0 { + write!(f, "{h}h {m}m") + } else { + write!(f, "{m}m {s}s") + } + } +} +#[cfg(test)] +mod test { + use super::Duration; + + #[test] + fn test_display_duration_1h() { + let dur = Duration { time: 60 * 60 }; + assert_eq!("1h 0m".to_owned(), dur.to_string()); + } + #[test] + fn test_display_duration_30min() { + let dur = Duration { time: 60 * 30 }; + assert_eq!("30m 0s".to_owned(), dur.to_string()); + } +} diff --git a/yt/src/select/selection_file/help.str b/yt/src/select/selection_file/help.str new file mode 100644 index 0000000..eb76ce5 --- /dev/null +++ b/yt/src/select/selection_file/help.str @@ -0,0 +1,12 @@ +# Commands: +# w, watch [-p,-s,-l] Mark the video given by the hash to be watched +# wd, watched [-p,-s,-l] Mark the video given by the hash as already watched +# d, drop [-p,-s,-l] Mark the video given by the hash to be dropped +# u, url [-p,-s,-l] Open the video URL in Firefox's `timesinks.youtube` profile +# p, pick [-p,-s,-l] Reset the videos status to 'Pick' +# a, add URL Add a video, defined by the URL +# +# See `yt select <cmd_name> --help` for more help. +# +# These lines can be re-ordered; they are executed from top to bottom. +# vim: filetype=yts conceallevel=2 concealcursor=nc colorcolumn= diff --git a/yt/src/select/selection_file/help.str.license b/yt/src/select/selection_file/help.str.license new file mode 100644 index 0000000..d4d410f --- /dev/null +++ b/yt/src/select/selection_file/help.str.license @@ -0,0 +1,9 @@ +yt - A fully featured command line YouTube client + +Copyright (C) 2024 Benedikt Peetz <benedikt.peetz@b-peetz.de> +SPDX-License-Identifier: GPL-3.0-or-later + +This file is part of Yt. 
+ +You should have received a copy of the License along with this program. +If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>. diff --git a/yt/src/select/selection_file/mod.rs b/yt/src/select/selection_file/mod.rs new file mode 100644 index 0000000..45809fa --- /dev/null +++ b/yt/src/select/selection_file/mod.rs @@ -0,0 +1,34 @@ +// yt - A fully featured command line YouTube client +// +// Copyright (C) 2024 Benedikt Peetz <benedikt.peetz@b-peetz.de> +// SPDX-License-Identifier: GPL-3.0-or-later +// +// This file is part of Yt. +// +// You should have received a copy of the License along with this program. +// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>. + +//! The data structures needed to express the file, which the user edits + +use anyhow::{Context, Result}; +use trinitry::Trinitry; + +pub mod duration; + +pub fn process_line(line: &str) -> Result<Option<Vec<String>>> { + // Filter out comments and empty lines + if line.starts_with('#') || line.trim().is_empty() { + Ok(None) + } else { + // pick 2195db "CouchRecherche? Gunnar und Han von STRG_F sind #mitfunkzuhause" "2020-04-01" "STRG_F - Live" "[1h 5m]" "https://www.youtube.com/watch?v=C8UXOaoMrXY" + + let tri = + Trinitry::new(line).with_context(|| format!("Failed to parse line '{}'", line))?; + + let mut vec = Vec::with_capacity(tri.arguments().len() + 1); + vec.push(tri.command().to_owned()); + vec.extend(tri.arguments().to_vec()); + + Ok(Some(vec)) + } +} diff --git a/yt/src/status/mod.rs b/yt/src/status/mod.rs new file mode 100644 index 0000000..7ffe8d7 --- /dev/null +++ b/yt/src/status/mod.rs @@ -0,0 +1,107 @@ +// yt - A fully featured command line YouTube client +// +// Copyright (C) 2024 Benedikt Peetz <benedikt.peetz@b-peetz.de> +// SPDX-License-Identifier: GPL-3.0-or-later +// +// This file is part of Yt. +// +// You should have received a copy of the License along with this program. +// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>. + +use anyhow::{Context, Result}; +use bytes::Bytes; + +use crate::{ + app::App, + download::Downloader, + storage::{ + subscriptions::get_subscriptions, + video_database::{getters::get_videos, VideoStatus}, + }, +}; + +macro_rules! 
get { + ($videos:expr, $status:ident) => { + $videos + .iter() + .filter(|vid| vid.status == VideoStatus::$status) + .count() + }; + (@changing $videos:expr, $status:ident) => { + $videos + .iter() + .filter(|vid| vid.status == VideoStatus::$status && vid.status_change) + .count() + }; +} + +pub async fn show(app: &App) -> Result<()> { + let all_videos = get_videos( + app, + &[ + VideoStatus::Pick, + // + VideoStatus::Watch, + VideoStatus::Cached, + VideoStatus::Watched, + // + VideoStatus::Drop, + VideoStatus::Dropped, + ], + None, + ) + .await?; + + // lengths + let picked_videos_len = get!(all_videos, Pick); + + let watch_videos_len = get!(all_videos, Watch); + let cached_videos_len = get!(all_videos, Cached); + let watched_videos_len = get!(all_videos, Watched); + + let drop_videos_len = get!(all_videos, Drop); + let dropped_videos_len = get!(all_videos, Dropped); + + // changing + let picked_videos_changing = get!(@changing all_videos, Pick); + + let watch_videos_changing = get!(@changing all_videos, Watch); + let cached_videos_changing = get!(@changing all_videos, Cached); + let watched_videos_changing = get!(@changing all_videos, Watched); + + let drop_videos_changing = get!(@changing all_videos, Drop); + let dropped_videos_changing = get!(@changing all_videos, Dropped); + + let subscriptions = get_subscriptions(app).await?; + let subscriptions_len = subscriptions.0.len(); + + let cache_usage_raw = Downloader::get_current_cache_allocation(app) + .await + .context("Failed to get current cache allocation")?; + let cache_usage = Bytes::new(cache_usage_raw); + println!( + "\ +Picked Videos: {picked_videos_len} ({picked_videos_changing} changing) + +Watch Videos: {watch_videos_len} ({watch_videos_changing} changing) +Cached Videos: {cached_videos_len} ({cached_videos_changing} changing) +Watched Videos: {watched_videos_len} ({watched_videos_changing} changing) + +Drop Videos: {drop_videos_len} ({drop_videos_changing} changing) +Dropped Videos: {dropped_videos_len} ({dropped_videos_changing} changing) + + + Subscriptions: {subscriptions_len} + Cache usage: {cache_usage}" + ); + + Ok(()) +} + +pub fn config(app: &App) -> Result<()> { + let config_str = toml::to_string(&app.config)?; + + print!("{}", config_str); + + Ok(()) +} diff --git a/yt/src/storage/mod.rs b/yt/src/storage/mod.rs new file mode 100644 index 0000000..6a12d8b --- /dev/null +++ b/yt/src/storage/mod.rs @@ -0,0 +1,12 @@ +// yt - A fully featured command line YouTube client +// +// Copyright (C) 2024 Benedikt Peetz <benedikt.peetz@b-peetz.de> +// SPDX-License-Identifier: GPL-3.0-or-later +// +// This file is part of Yt. +// +// You should have received a copy of the License along with this program. +// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>. + +pub mod subscriptions; +pub mod video_database; diff --git a/yt/src/storage/subscriptions.rs b/yt/src/storage/subscriptions.rs new file mode 100644 index 0000000..22edd08 --- /dev/null +++ b/yt/src/storage/subscriptions.rs @@ -0,0 +1,140 @@ +// yt - A fully featured command line YouTube client +// +// Copyright (C) 2024 Benedikt Peetz <benedikt.peetz@b-peetz.de> +// SPDX-License-Identifier: GPL-3.0-or-later +// +// This file is part of Yt. +// +// You should have received a copy of the License along with this program. +// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>. + +//! 
 Handle subscriptions
+
+use std::collections::HashMap;
+
+use anyhow::Result;
+use log::debug;
+use serde_json::{json, Value};
+use sqlx::query;
+use url::Url;
+use yt_dlp::wrapper::info_json::InfoType;
+
+use crate::app::App;
+
+#[derive(Clone, Debug)]
+pub struct Subscription {
+    /// The human-readable name of this subscription
+    pub name: String,
+
+    /// The URL this subscription subscribes to
+    pub url: Url,
+}
+
+impl Subscription {
+    pub fn new(name: String, url: Url) -> Self {
+        Self { name, url }
+    }
+}
+
+/// Check whether a URL could be used as a subscription URL
+pub async fn check_url(url: &Url) -> Result<bool> {
+    let yt_opts = match json!( {
+        "playliststart": 1,
+        "playlistend": 10,
+        "noplaylist": false,
+        "extract_flat": "in_playlist",
+    }) {
+        Value::Object(map) => map,
+        _ => unreachable!("This is hardcoded"),
+    };
+
+    let info = yt_dlp::extract_info(&yt_opts, url, false, false).await?;
+
+    debug!("{:#?}", info);
+
+    Ok(info._type == Some(InfoType::Playlist))
+}
+
+#[derive(Default)]
+pub struct Subscriptions(pub(crate) HashMap<String, Subscription>);
+
+pub async fn remove_all_subscriptions(app: &App) -> Result<()> {
+    query!(
+        "
+    DELETE FROM subscriptions;
+    ",
+    )
+    .execute(&app.database)
+    .await?;
+
+    Ok(())
+}
+
+/// Get a list of subscriptions
+pub async fn get_subscriptions(app: &App) -> Result<Subscriptions> {
+    let raw_subs = query!(
+        "
+    SELECT *
+    FROM subscriptions;
+    "
+    )
+    .fetch_all(&app.database)
+    .await?;
+
+    let subscriptions: HashMap<String, Subscription> = raw_subs
+        .into_iter()
+        .map(|sub| {
+            (
+                sub.name.clone(),
+                Subscription::new(
+                    sub.name,
+                    Url::parse(&sub.url).expect("This should be valid"),
+                ),
+            )
+        })
+        .collect();
+
+    Ok(Subscriptions(subscriptions))
+}
+
+pub async fn add_subscription(app: &App, sub: &Subscription) -> Result<()> {
+    let url = sub.url.to_string();
+
+    query!(
+        "
+    INSERT INTO subscriptions (
+        name,
+        url
+    ) VALUES (?, ?);
+    ",
+        sub.name,
+        url
+    )
+    .execute(&app.database)
+    .await?;
+
+    println!("Subscribed to '{}' at '{}'", sub.name, sub.url);
+    Ok(())
+}
+
+pub async fn remove_subscription(app: &App, sub: &Subscription) -> Result<()> {
+    let output = query!(
+        "
+    DELETE FROM subscriptions
+    WHERE name = ?
+    ",
+        sub.name,
+    )
+    .execute(&app.database)
+    .await?;
+
+    assert_eq!(
+        output.rows_affected(),
+        1,
+        "The remove subscription query affected more (or fewer) than one row. This is a bug."
+    );
+
+    println!("Unsubscribed from '{}' at '{}'", sub.name, sub.url);
+
+    Ok(())
+}
diff --git a/yt/src/storage/video_database/downloader.rs b/yt/src/storage/video_database/downloader.rs
new file mode 100644
index 0000000..ccd4ca9
--- /dev/null
+++ b/yt/src/storage/video_database/downloader.rs
@@ -0,0 +1,153 @@
+// yt - A fully featured command line YouTube client
+//
+// Copyright (C) 2024 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+// SPDX-License-Identifier: GPL-3.0-or-later
+//
+// This file is part of Yt.
+//
+// You should have received a copy of the License along with this program.
+// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
+
+use std::path::{Path, PathBuf};
+
+use anyhow::Result;
+use log::debug;
+use sqlx::query;
+use url::Url;
+
+use crate::{app::App, storage::video_database::VideoStatus};
+
+use super::{ExtractorHash, Video};
+
+/// Returns the next video which should be downloaded. This respects the priority assigned by select.
+/// It does not return videos which are already cached.
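+///
+/// A minimal sketch of the intended call pattern (the real download loop lives
+/// elsewhere; `download` below is a hypothetical stand-in for that step):
+///
+/// ```ignore
+/// while let Some(video) = get_next_uncached_video(app).await? {
+///     let path = download(&video).await?; // hypothetical download step
+///     set_video_cache_path(app, &video.extractor_hash, Some(&path)).await?;
+/// }
+/// ```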
+pub async fn get_next_uncached_video(app: &App) -> Result<Option<Video>> { + let status = VideoStatus::Watch.as_db_integer(); + + // NOTE: The ORDER BY statement should be the same as the one in [`getters::get_videos`].<2024-08-22> + let result = query!( + r#" + SELECT * + FROM videos + WHERE status = ? AND cache_path IS NULL + ORDER BY priority DESC, publish_date DESC + LIMIT 1; + "#, + status + ) + .fetch_one(&app.database) + .await; + + if let Err(sqlx::Error::RowNotFound) = result { + Ok(None) + } else { + let base = result?; + + let thumbnail_url = base + .thumbnail_url + .as_ref() + .map(|url| Url::parse(url).expect("Parsing this as url should always work")); + + let status_change = if base.status_change == 1 { + true + } else { + assert_eq!(base.status_change, 0, "Can only be 1 or 0"); + false + }; + + let video = Video { + cache_path: base.cache_path.as_ref().map(PathBuf::from), + description: base.description.clone(), + duration: base.duration, + extractor_hash: ExtractorHash::from_hash( + base.extractor_hash + .parse() + .expect("The hash in the db should be valid"), + ), + last_status_change: base.last_status_change, + parent_subscription_name: base.parent_subscription_name.clone(), + priority: base.priority, + publish_date: base.publish_date, + status: VideoStatus::from_db_integer(base.status), + status_change, + thumbnail_url, + title: base.title.clone(), + url: Url::parse(&base.url).expect("Parsing this as url should always work"), + }; + + Ok(Some(video)) + } +} + +/// Update the cached path of a video. Will be set to NULL if the path is None +/// This will also set the status to `Cached` when path is Some, otherwise it set's the status to +/// `Watch`. +pub async fn set_video_cache_path( + app: &App, + video: &ExtractorHash, + path: Option<&Path>, +) -> Result<()> { + if let Some(path) = path { + debug!( + "Setting cache path from '{}' to '{}'", + video.into_short_hash(app).await?, + path.display() + ); + + let path_str = path.display().to_string(); + let extractor_hash = video.hash().to_string(); + let status = VideoStatus::Cached.as_db_integer(); + + query!( + r#" + UPDATE videos + SET cache_path = ?, status = ? + WHERE extractor_hash = ?; + "#, + path_str, + status, + extractor_hash + ) + .execute(&app.database) + .await?; + + Ok(()) + } else { + debug!( + "Setting cache path from '{}' to NULL", + video.into_short_hash(app).await?, + ); + + let extractor_hash = video.hash().to_string(); + let status = VideoStatus::Watch.as_db_integer(); + + query!( + r#" + UPDATE videos + SET cache_path = NULL, status = ? + WHERE extractor_hash = ?; + "#, + status, + extractor_hash + ) + .execute(&app.database) + .await?; + + Ok(()) + } +} + +/// Returns the number of cached videos +pub async fn get_allocated_cache(app: &App) -> Result<u32> { + let count = query!( + r#" + SELECT COUNT(cache_path) as count + FROM videos + WHERE cache_path IS NOT NULL; +"#, + ) + .fetch_one(&app.database) + .await?; + + Ok(count.count as u32) +} diff --git a/yt/src/storage/video_database/extractor_hash.rs b/yt/src/storage/video_database/extractor_hash.rs new file mode 100644 index 0000000..c956919 --- /dev/null +++ b/yt/src/storage/video_database/extractor_hash.rs @@ -0,0 +1,159 @@ +// yt - A fully featured command line YouTube client +// +// Copyright (C) 2024 Benedikt Peetz <benedikt.peetz@b-peetz.de> +// SPDX-License-Identifier: GPL-3.0-or-later +// +// This file is part of Yt. +// +// You should have received a copy of the License along with this program. 
+// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
+
+use std::{collections::HashMap, fmt::Display, str::FromStr};
+
+use anyhow::{bail, Context, Result};
+use blake3::Hash;
+use log::debug;
+use tokio::sync::OnceCell;
+
+use crate::{app::App, storage::video_database::getters::get_all_hashes};
+
+static EXTRACTOR_HASH_LENGTH: OnceCell<usize> = OnceCell::const_new();
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ExtractorHash {
+    hash: Hash,
+}
+
+impl Display for ExtractorHash {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        self.hash.fmt(f)
+    }
+}
+
+#[derive(Debug, Clone)]
+pub struct ShortHash(String);
+
+impl Display for ShortHash {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        self.0.fmt(f)
+    }
+}
+
+#[derive(Debug, Clone)]
+pub struct LazyExtractorHash {
+    value: ShortHash,
+}
+
+impl FromStr for LazyExtractorHash {
+    type Err = anyhow::Error;
+
+    fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
+        // perform some cheap validation
+        if s.len() > 64 {
+            bail!("A hash can only contain 64 characters!");
+        }
+
+        Ok(Self {
+            value: ShortHash(s.to_owned()),
+        })
+    }
+}
+
+impl LazyExtractorHash {
+    /// Turn the [`LazyExtractorHash`] into the [`ExtractorHash`]
+    pub async fn realize(self, app: &App) -> Result<ExtractorHash> {
+        ExtractorHash::from_short_hash(app, &self.value).await
+    }
+}
+
+impl ExtractorHash {
+    pub fn from_hash(hash: Hash) -> Self {
+        Self { hash }
+    }
+    pub async fn from_short_hash(app: &App, s: &ShortHash) -> Result<Self> {
+        Ok(Self {
+            hash: Self::short_hash_to_full_hash(app, s).await?,
+        })
+    }
+
+    pub fn hash(&self) -> &Hash {
+        &self.hash
+    }
+
+    pub async fn into_short_hash(&self, app: &App) -> Result<ShortHash> {
+        let needed_chars = if let Some(needed_chars) = EXTRACTOR_HASH_LENGTH.get() {
+            *needed_chars
+        } else {
+            let needed_chars = self
+                .get_needed_char_len(app)
+                .await
+                .context("Failed to calculate needed char length")?;
+            EXTRACTOR_HASH_LENGTH
+                .set(needed_chars)
+                .expect("This should work at this stage");
+
+            needed_chars
+        };
+
+        Ok(ShortHash(
+            self.hash()
+                .to_hex()
+                .chars()
+                .take(needed_chars)
+                .collect::<String>(),
+        ))
+    }
+
+    async fn short_hash_to_full_hash(app: &App, s: &ShortHash) -> Result<Hash> {
+        let all_hashes = get_all_hashes(app)
+            .await
+            .context("Failed to fetch all extractor hashes from the database")?;
+
+        let needed_chars = s.0.len();
+
+        for hash in all_hashes {
+            if hash.to_hex()[..needed_chars] == s.0 {
+                return Ok(hash);
+            }
+        }
+
+        bail!("Your shortened hash does not match a real hash (this is probably a bug)!");
+    }
+
+    async fn get_needed_char_len(&self, app: &App) -> Result<usize> {
+        debug!("Calculating the needed hash char length");
+        let all_hashes = get_all_hashes(app)
+            .await
+            .context("Failed to fetch all extractor hashes from the database")?;
+
+        let all_char_vec_hashes = all_hashes
+            .into_iter()
+            .map(|hash| hash.to_hex().chars().collect::<Vec<char>>())
+            .collect::<Vec<Vec<_>>>();
+
+        // This value should be updated later; if not, Rust will panic in the assertion.
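+        // Worked example (illustrative): if the stored hashes start with "abcd…"
+        // and "abce…", the prefixes of length 1 to 3 collide ("a", "ab", "abc"),
+        // so the outer loop continues; at i = 4 the prefixes "abcd" and "abce"
+        // are unique and `needed_chars` becomes 4.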
+        let mut needed_chars: usize = 1000;
+        'outer: for i in 1..64 {
+            let i_chars: Vec<String> = all_char_vec_hashes
+                .iter()
+                .map(|vec| vec.iter().take(i).collect::<String>())
+                .collect();
+
+            let mut uniqueness_hashmap: HashMap<String, ()> = HashMap::new();
+            for ch in i_chars {
+                if let Some(()) = uniqueness_hashmap.insert(ch, ()) {
+                    // The key was already in the hash map, thus this prefix is duplicated and we
+                    // need at least one char more
+                    continue 'outer;
+                }
+            }
+
+            needed_chars = i;
+            break 'outer;
+        }
+
+        assert!(needed_chars <= 64, "Hashes are only 64 hex characters long");
+
+        Ok(needed_chars)
+    }
+}
diff --git a/yt/src/storage/video_database/getters.rs b/yt/src/storage/video_database/getters.rs
new file mode 100644
index 0000000..29dd014
--- /dev/null
+++ b/yt/src/storage/video_database/getters.rs
@@ -0,0 +1,345 @@
+// yt - A fully featured command line YouTube client
+//
+// Copyright (C) 2024 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+// SPDX-License-Identifier: GPL-3.0-or-later
+//
+// This file is part of Yt.
+//
+// You should have received a copy of the License along with this program.
+// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
+
+//! These functions interact with the storage db in a read-only way. They are added on demand (as
+//! you could theoretically do everything with the `get_videos` function), as
+//! performance or convenience requires.
+use std::{fs::File, path::PathBuf};
+
+use anyhow::{bail, Context, Result};
+use blake3::Hash;
+use log::debug;
+use sqlx::{query, QueryBuilder, Row, Sqlite};
+use url::Url;
+use yt_dlp::wrapper::info_json::InfoJson;
+
+use crate::{
+    app::App,
+    storage::{
+        subscriptions::Subscription,
+        video_database::{extractor_hash::ExtractorHash, Video},
+    },
+};
+
+use super::{MpvOptions, VideoOptions, VideoStatus, YtDlpOptions};
+
+macro_rules!
video_from_record { + ($record:expr) => { + let thumbnail_url = if let Some(url) = &$record.thumbnail_url { + Some(Url::parse(&url).expect("Parsing this as url should always work")) + } else { + None + }; + + Ok(Video { + cache_path: $record.cache_path.as_ref().map(|val| PathBuf::from(val)), + description: $record.description.clone(), + duration: $record.duration, + extractor_hash: ExtractorHash::from_hash( + $record + .extractor_hash + .parse() + .expect("The db hash should be a valid blake3 hash"), + ), + last_status_change: $record.last_status_change, + parent_subscription_name: $record.parent_subscription_name.clone(), + publish_date: $record.publish_date, + status: VideoStatus::from_db_integer($record.status), + thumbnail_url, + title: $record.title.clone(), + url: Url::parse(&$record.url).expect("Parsing this as url should always work"), + priority: $record.priority, + status_change: if $record.status_change == 1 { + true + } else { + assert_eq!($record.status_change, 0); + false + }, + }) + }; +} + +/// Get the lines to display at the selection file +/// [`changing` = true]: Means that we include *only* videos, that have the `status_changing` flag set +/// [`changing` = None]: Means that we include *both* videos, that have the `status_changing` flag set and not set +pub async fn get_videos( + app: &App, + allowed_states: &[VideoStatus], + changing: Option<bool>, +) -> Result<Vec<Video>> { + let mut qb: QueryBuilder<Sqlite> = QueryBuilder::new( + "\ + SELECT * + FROM videos + WHERE status IN ", + ); + + qb.push("("); + allowed_states + .iter() + .enumerate() + .for_each(|(index, state)| { + qb.push("'"); + qb.push(state.as_db_integer()); + qb.push("'"); + + if index != allowed_states.len() - 1 { + qb.push(","); + } + }); + qb.push(")"); + + if let Some(val) = changing { + if val { + qb.push(" AND status_change = 1"); + } else { + qb.push(" AND status_change = 0"); + } + } + + qb.push("\n ORDER BY priority DESC, publish_date DESC;"); + + debug!("Will run: \"{}\"", qb.sql()); + + let videos = qb.build().fetch_all(&app.database).await.with_context(|| { + format!( + "Failed to query videos with states: '{}'", + allowed_states.iter().fold(String::new(), |mut acc, state| { + acc.push(' '); + acc.push_str(state.as_str()); + acc + }), + ) + })?; + + let real_videos: Vec<Video> = videos + .iter() + .map(|base| -> Result<Video> { + Ok(Video { + cache_path: base + .get::<Option<String>, &str>("cache_path") + .as_ref() + .map(PathBuf::from), + description: base.get::<Option<String>, &str>("description").clone(), + duration: base.get("duration"), + extractor_hash: ExtractorHash::from_hash( + base.get::<String, &str>("extractor_hash") + .parse() + .expect("The db hash should be a valid blake3 hash"), + ), + last_status_change: base.get("last_status_change"), + parent_subscription_name: base + .get::<Option<String>, &str>("parent_subscription_name") + .clone(), + publish_date: base.get("publish_date"), + status: VideoStatus::from_db_integer(base.get("status")), + thumbnail_url: base + .get::<Option<String>, &str>("thumbnail_url") + .as_ref() + .map(|url| Url::parse(url).expect("Parsing this as url should always work")), + title: base.get::<String, &str>("title").to_owned(), + url: Url::parse(base.get("url")).expect("Parsing this as url should always work"), + priority: base.get("priority"), + status_change: { + let val = base.get::<i64, &str>("status_change"); + if val == 1 { + true + } else { + assert_eq!(val, 0, "Can only be 1 or 0"); + false + } + }, + }) + }) + 
.collect::<Result<Vec<Video>>>()?; + + Ok(real_videos) +} + +pub async fn get_video_info_json(video: &Video) -> Result<Option<InfoJson>> { + if let Some(mut path) = video.cache_path.clone() { + if !path.set_extension("info.json") { + bail!( + "Failed to change path extension to 'info.json': {}", + path.display() + ); + } + let info_json_string = File::open(path)?; + let info_json: InfoJson = serde_json::from_reader(&info_json_string)?; + + Ok(Some(info_json)) + } else { + Ok(None) + } +} + +pub async fn get_video_by_hash(app: &App, hash: &ExtractorHash) -> Result<Video> { + let ehash = hash.hash().to_string(); + + let raw_video = query!( + " + SELECT * FROM videos WHERE extractor_hash = ?; + ", + ehash + ) + .fetch_one(&app.database) + .await?; + + video_from_record! {raw_video} +} + +pub async fn get_currently_playing_video(app: &App) -> Result<Option<Video>> { + let mut videos: Vec<Video> = get_changing_videos(app, VideoStatus::Cached).await?; + + if videos.is_empty() { + Ok(None) + } else { + assert_eq!( + videos.len(), + 1, + "Only one video can change from cached to watched at once!" + ); + + Ok(Some(videos.remove(0))) + } +} + +pub async fn get_changing_videos(app: &App, old_state: VideoStatus) -> Result<Vec<Video>> { + let status = old_state.as_db_integer(); + + let matching = query!( + r#" + SELECT * + FROM videos + WHERE status_change = 1 AND status = ?; + "#, + status + ) + .fetch_all(&app.database) + .await?; + + let real_videos: Vec<Video> = matching + .iter() + .map(|base| -> Result<Video> { + video_from_record! {base} + }) + .collect::<Result<Vec<Video>>>()?; + + Ok(real_videos) +} + +pub async fn get_all_hashes(app: &App) -> Result<Vec<Hash>> { + let hashes_hex = query!( + r#" + SELECT extractor_hash + FROM videos; + "# + ) + .fetch_all(&app.database) + .await?; + + Ok(hashes_hex + .iter() + .map(|hash| { + Hash::from_hex(&hash.extractor_hash) + .expect("These values started as blake3 hashes, they should stay blake3 hashes") + }) + .collect()) +} + +pub async fn get_video_hashes(app: &App, subs: &Subscription) -> Result<Vec<Hash>> { + let hashes_hex = query!( + r#" + SELECT extractor_hash + FROM videos + WHERE parent_subscription_name = ?; + "#, + subs.name + ) + .fetch_all(&app.database) + .await?; + + Ok(hashes_hex + .iter() + .map(|hash| { + Hash::from_hex(&hash.extractor_hash) + .expect("These values started as blake3 hashes, they should stay blake3 hashes") + }) + .collect()) +} + +pub async fn get_video_yt_dlp_opts(app: &App, hash: &ExtractorHash) -> Result<YtDlpOptions> { + let ehash = hash.hash().to_string(); + + let yt_dlp_options = query!( + r#" + SELECT subtitle_langs + FROM video_options + WHERE extractor_hash = ?; + "#, + ehash + ) + .fetch_one(&app.database) + .await + .with_context(|| { + format!( + "Failed to fetch the `yt_dlp_video_opts` for video: {}", + hash + ) + })?; + + Ok(YtDlpOptions { + subtitle_langs: yt_dlp_options.subtitle_langs, + }) +} +pub async fn get_video_mpv_opts(app: &App, hash: &ExtractorHash) -> Result<MpvOptions> { + let ehash = hash.hash().to_string(); + + let mpv_options = query!( + r#" + SELECT playback_speed + FROM video_options + WHERE extractor_hash = ?; + "#, + ehash + ) + .fetch_one(&app.database) + .await + .with_context(|| format!("Failed to fetch the `mpv_video_opts` for video: {}", hash))?; + + Ok(MpvOptions { + playback_speed: mpv_options.playback_speed, + }) +} + +pub async fn get_video_opts(app: &App, hash: &ExtractorHash) -> Result<VideoOptions> { + let ehash = hash.hash().to_string(); + + let opts = query!( + r#" + 
SELECT playback_speed, subtitle_langs + FROM video_options + WHERE extractor_hash = ?; + "#, + ehash + ) + .fetch_one(&app.database) + .await + .with_context(|| format!("Failed to fetch the `video_opts` for video: {}", hash))?; + + let mpv = MpvOptions { + playback_speed: opts.playback_speed, + }; + let yt_dlp = YtDlpOptions { + subtitle_langs: opts.subtitle_langs, + }; + + Ok(VideoOptions { mpv, yt_dlp }) +} diff --git a/yt/src/storage/video_database/mod.rs b/yt/src/storage/video_database/mod.rs new file mode 100644 index 0000000..1765f79 --- /dev/null +++ b/yt/src/storage/video_database/mod.rs @@ -0,0 +1,179 @@ +// yt - A fully featured command line YouTube client +// +// Copyright (C) 2024 Benedikt Peetz <benedikt.peetz@b-peetz.de> +// SPDX-License-Identifier: GPL-3.0-or-later +// +// This file is part of Yt. +// +// You should have received a copy of the License along with this program. +// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>. + +use std::{fmt::Write, path::PathBuf}; + +use url::Url; + +use crate::{app::App, storage::video_database::extractor_hash::ExtractorHash}; + +pub mod downloader; +pub mod extractor_hash; +pub mod getters; +pub mod setters; + +#[derive(Debug, Clone)] +pub struct Video { + pub cache_path: Option<PathBuf>, + pub description: Option<String>, + pub duration: Option<f64>, + pub extractor_hash: ExtractorHash, + pub last_status_change: i64, + /// The associated subscription this video was fetched from (null, when the video was `add`ed) + pub parent_subscription_name: Option<String>, + pub priority: i64, + pub publish_date: Option<i64>, + pub status: VideoStatus, + /// The video is currently changing its state (for example from being `SELECT` to being `CACHE`) + pub status_change: bool, + pub thumbnail_url: Option<Url>, + pub title: String, + pub url: Url, +} + +#[derive(Debug)] +pub struct VideoOptions { + pub yt_dlp: YtDlpOptions, + pub mpv: MpvOptions, +} +impl VideoOptions { + pub(crate) fn new(subtitle_langs: String, playback_speed: f64) -> Self { + let yt_dlp = YtDlpOptions { subtitle_langs }; + let mpv = MpvOptions { playback_speed }; + Self { yt_dlp, mpv } + } + + /// This will write out the options that are different from the defaults. + /// Beware, that this does not set the priority. + pub fn to_cli_flags(self, app: &App) -> String { + let mut f = String::new(); + + if self.mpv.playback_speed != app.config.select.playback_speed { + write!(f, " --speed '{}'", self.mpv.playback_speed).expect("Works"); + } + if self.yt_dlp.subtitle_langs != app.config.select.subtitle_langs { + write!(f, " --subtitle-langs '{}'", self.yt_dlp.subtitle_langs).expect("Works"); + } + + f.trim().to_owned() + } +} + +#[derive(Debug)] +/// Additionally settings passed to mpv on watch +pub struct MpvOptions { + /// The playback speed. (1 is 100%, 2.7 is 270%, and so on) + pub playback_speed: f64, +} + +#[derive(Debug)] +/// Additionally configuration options, passed to yt-dlp on download +pub struct YtDlpOptions { + /// In the form of `lang1,lang2,lang3` (e.g. 
`en,de,sv`) + pub subtitle_langs: String, +} + +/// # Video Lifetime (words in <brackets> are commands): +/// <Pick> +/// / \ +/// <Watch> <Drop> -> Dropped // yt select +/// | +/// Cache // yt cache +/// | +/// Watched // yt watch +#[derive(Default, Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Copy)] +pub enum VideoStatus { + #[default] + Pick, + + /// The video has been select to be watched + Watch, + /// The video has been cached and is ready to be watched + Cached, + /// The video has been watched + Watched, + + /// The video has been select to be dropped + Drop, + /// The video has been dropped + Dropped, +} + +impl VideoStatus { + pub const ALL: &'static [Self; 6] = &[ + Self::Pick, + // + VideoStatus::Watch, + VideoStatus::Cached, + VideoStatus::Watched, + // + VideoStatus::Drop, + VideoStatus::Dropped, + ]; + + pub fn as_command(&self) -> &str { + // NOTE: Keep the serialize able variants synced with the main `select` function <2024-06-14> + // Also try to ensure, that the strings have the same length + match self { + VideoStatus::Pick => "pick ", + + VideoStatus::Watch => "watch ", + VideoStatus::Cached => "watch ", + VideoStatus::Watched => "watched", + + VideoStatus::Drop => "drop ", + VideoStatus::Dropped => "drop ", + } + } + + pub fn as_db_integer(&self) -> i64 { + // These numbers should not change their mapping! + // Oh, and keep them in sync with the SQLite check constraint. + match self { + VideoStatus::Pick => 0, + + VideoStatus::Watch => 1, + VideoStatus::Cached => 2, + VideoStatus::Watched => 3, + + VideoStatus::Drop => 4, + VideoStatus::Dropped => 5, + } + } + pub fn from_db_integer(num: i64) -> Self { + match num { + 0 => Self::Pick, + + 1 => Self::Watch, + 2 => Self::Cached, + 3 => Self::Watched, + + 4 => Self::Drop, + 5 => Self::Dropped, + other => unreachable!( + "The database returned a enum discriminator, unknown to us: '{}'", + other + ), + } + } + + pub fn as_str(&self) -> &'static str { + match self { + VideoStatus::Pick => "Pick", + + VideoStatus::Watch => "Watch", + VideoStatus::Cached => "Cache", + VideoStatus::Watched => "Watched", + + VideoStatus::Drop => "Drop", + VideoStatus::Dropped => "Dropped", + } + } +} diff --git a/yt/src/storage/video_database/schema.sql b/yt/src/storage/video_database/schema.sql new file mode 100644 index 0000000..3afd091 --- /dev/null +++ b/yt/src/storage/video_database/schema.sql @@ -0,0 +1,57 @@ +-- yt - A fully featured command line YouTube client +-- +-- Copyright (C) 2024 Benedikt Peetz <benedikt.peetz@b-peetz.de> +-- SPDX-License-Identifier: GPL-3.0-or-later +-- +-- This file is part of Yt. +-- +-- You should have received a copy of the License along with this program. +-- If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>. + +-- All tables should be declared STRICT, as I actually like to have types checking (and a +-- db that doesn't lie to me). 
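+
+-- A reading aid for the CHECK constraints on `videos` below (cf.
+-- `set_video_cache_path` in downloader.rs): a row may only carry a cache_path
+-- while its status is 2 (Cached), and status 2 in turn requires a cache_path.
+-- For example (illustrative rows):
+--   (status = 1 /* Watch  */, cache_path = NULL)          -> accepted
+--   (status = 1 /* Watch  */, cache_path = '/tmp/a.mkv')  -> rejected
+--   (status = 2 /* Cached */, cache_path = '/tmp/a.mkv')  -> accepted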
+ +-- Keep this table in sync with the `Video` structure +CREATE TABLE IF NOT EXISTS videos ( + cache_path TEXT UNIQUE CHECK (CASE WHEN cache_path IS NOT NULL THEN + status == 2 + ELSE + 1 + END), + description TEXT, + duration REAL, + extractor_hash TEXT UNIQUE NOT NULL PRIMARY KEY, + last_status_change INTEGER NOT NULL, + parent_subscription_name TEXT, + priority INTEGER NOT NULL DEFAULT 0, + publish_date INTEGER, + status INTEGER NOT NULL DEFAULT 0 CHECK (status IN (0, 1, 2, 3, 4, 5) AND + CASE WHEN status == 2 THEN + cache_path IS NOT NULL + ELSE + 1 + END AND + CASE WHEN status != 2 THEN + cache_path IS NULL + ELSE + 1 + END), + status_change INTEGER NOT NULL DEFAULT 0 CHECK (status_change IN (0, 1)), + thumbnail_url TEXT, + title TEXT NOT NULL, + url TEXT UNIQUE NOT NULL +) STRICT; + +-- Store additional metadata for the videos marked to be watched +CREATE TABLE IF NOT EXISTS video_options ( + extractor_hash TEXT UNIQUE NOT NULL PRIMARY KEY, + subtitle_langs TEXT NOT NULL, + playback_speed REAL NOT NULL, + FOREIGN KEY(extractor_hash) REFERENCES videos (extractor_hash) +) STRICT; + +-- Store subscriptions +CREATE TABLE IF NOT EXISTS subscriptions ( + name TEXT UNIQUE NOT NULL PRIMARY KEY, + url TEXT NOT NULL +) STRICT; diff --git a/yt/src/storage/video_database/setters.rs b/yt/src/storage/video_database/setters.rs new file mode 100644 index 0000000..c160138 --- /dev/null +++ b/yt/src/storage/video_database/setters.rs @@ -0,0 +1,270 @@ +// yt - A fully featured command line YouTube client +// +// Copyright (C) 2024 Benedikt Peetz <benedikt.peetz@b-peetz.de> +// SPDX-License-Identifier: GPL-3.0-or-later +// +// This file is part of Yt. +// +// You should have received a copy of the License along with this program. +// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>. + +//! These functions change the database. They are added on a demand basis. + +use anyhow::Result; +use chrono::Utc; +use log::{debug, info}; +use sqlx::query; +use tokio::fs; + +use crate::{app::App, storage::video_database::extractor_hash::ExtractorHash}; + +use super::{Video, VideoOptions, VideoStatus}; + +/// Set a new status for a video. +/// This will only update the status time stamp/priority when the status or the priority has changed . +pub async fn set_video_status( + app: &App, + video_hash: &ExtractorHash, + new_status: VideoStatus, + new_priority: Option<i64>, +) -> Result<()> { + let video_hash = video_hash.hash().to_string(); + + let old = query!( + r#" + SELECT status, priority, cache_path + FROM videos + WHERE extractor_hash = ? + "#, + video_hash + ) + .fetch_one(&app.database) + .await?; + + let cache_path = if (VideoStatus::from_db_integer(old.status) == VideoStatus::Cached) + && (new_status != VideoStatus::Cached) + { + None + } else { + old.cache_path.as_deref() + }; + + let new_status = new_status.as_db_integer(); + + if let Some(new_priority) = new_priority { + if old.status == new_status && old.priority == new_priority { + return Ok(()); + } + + let now = Utc::now().timestamp(); + + debug!( + "Running status change: {:#?} -> {:#?}...", + VideoStatus::from_db_integer(old.status), + VideoStatus::from_db_integer(new_status), + ); + + query!( + r#" + UPDATE videos + SET status = ?, last_status_change = ?, priority = ?, cache_path = ? 
+ WHERE extractor_hash = ?; + "#, + new_status, + now, + new_priority, + cache_path, + video_hash + ) + .execute(&app.database) + .await?; + } else { + if old.status == new_status { + return Ok(()); + } + + let now = Utc::now().timestamp(); + + debug!( + "Running status change: {:#?} -> {:#?}...", + VideoStatus::from_db_integer(old.status), + VideoStatus::from_db_integer(new_status), + ); + + query!( + r#" + UPDATE videos + SET status = ?, last_status_change = ?, cache_path = ? + WHERE extractor_hash = ?; + "#, + new_status, + now, + cache_path, + video_hash + ) + .execute(&app.database) + .await?; + } + + debug!("Finished status change."); + Ok(()) +} + +/// Mark a video as watched. +/// This will both set the status to `Watched` and the cache_path to Null. +pub async fn set_video_watched(app: &App, video: &Video) -> Result<()> { + let video_hash = video.extractor_hash.hash().to_string(); + let new_status = VideoStatus::Watched.as_db_integer(); + + info!("Will set video watched: '{}'", video.title); + + let old = query!( + r#" + SELECT status, priority + FROM videos + WHERE extractor_hash = ? + "#, + video_hash + ) + .fetch_one(&app.database) + .await?; + + assert_ne!( + old.status, new_status, + "The video should not be marked as watched already." + ); + assert_eq!( + old.status, + VideoStatus::Cached.as_db_integer(), + "The video should have been marked cached" + ); + + let now = Utc::now().timestamp(); + + if let Some(path) = &video.cache_path { + if let Ok(true) = path.try_exists() { + fs::remove_file(path).await? + } + } + + query!( + r#" + UPDATE videos + SET status = ?, last_status_change = ?, cache_path = NULL + WHERE extractor_hash = ?; + "#, + new_status, + now, + video_hash + ) + .execute(&app.database) + .await?; + + Ok(()) +} + +pub async fn set_state_change( + app: &App, + video_extractor_hash: &ExtractorHash, + changing: bool, +) -> Result<()> { + let state_change = if changing { 1 } else { 0 }; + let video_extractor_hash = video_extractor_hash.hash().to_string(); + + query!( + r#" + UPDATE videos + SET status_change = ? + WHERE extractor_hash = ?; + "#, + state_change, + video_extractor_hash, + ) + .execute(&app.database) + .await?; + + Ok(()) +} + +pub async fn set_video_options( + app: &App, + hash: &ExtractorHash, + video_options: &VideoOptions, +) -> Result<()> { + let video_extractor_hash = hash.hash().to_string(); + let playback_speed = video_options.mpv.playback_speed; + let subtitle_langs = &video_options.yt_dlp.subtitle_langs; + + query!( + r#" + UPDATE video_options + SET playback_speed = ?, subtitle_langs = ? 
+ WHERE extractor_hash = ?; + "#, + playback_speed, + subtitle_langs, + video_extractor_hash, + ) + .execute(&app.database) + .await?; + + Ok(()) +} + +pub async fn add_video(app: &App, video: Video) -> Result<()> { + let parent_subscription_name = video.parent_subscription_name; + + let thumbnail_url = video.thumbnail_url.map(|val| val.to_string()); + + let status = video.status.as_db_integer(); + let status_change = if video.status_change { 1 } else { 0 }; + let url = video.url.to_string(); + let extractor_hash = video.extractor_hash.hash().to_string(); + + let default_subtitle_langs = &app.config.select.subtitle_langs; + let default_mpv_playback_speed = app.config.select.playback_speed; + + query!( + r#" + BEGIN; + INSERT INTO videos ( + parent_subscription_name, + status, + status_change, + last_status_change, + title, + url, + description, + duration, + publish_date, + thumbnail_url, + extractor_hash) + VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?); + + INSERT INTO video_options ( + extractor_hash, + subtitle_langs, + playback_speed) + VALUES (?, ?, ?); + COMMIT; + "#, + parent_subscription_name, + status, + status_change, + video.last_status_change, + video.title, + url, + video.description, + video.duration, + video.publish_date, + thumbnail_url, + extractor_hash, + extractor_hash, + default_subtitle_langs, + default_mpv_playback_speed + ) + .execute(&app.database) + .await?; + + Ok(()) +} diff --git a/yt/src/subscribe/mod.rs b/yt/src/subscribe/mod.rs new file mode 100644 index 0000000..74d88b4 --- /dev/null +++ b/yt/src/subscribe/mod.rs @@ -0,0 +1,184 @@ +// yt - A fully featured command line YouTube client +// +// Copyright (C) 2024 Benedikt Peetz <benedikt.peetz@b-peetz.de> +// SPDX-License-Identifier: GPL-3.0-or-later +// +// This file is part of Yt. +// +// You should have received a copy of the License along with this program. +// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>. + +use std::str::FromStr; + +use anyhow::{bail, Context, Result}; +use futures::FutureExt; +use log::warn; +use serde_json::{json, Value}; +use tokio::io::{AsyncBufRead, AsyncBufReadExt}; +use url::Url; +use yt_dlp::wrapper::info_json::InfoType; + +use crate::{ + app::App, + storage::subscriptions::{ + add_subscription, check_url, get_subscriptions, remove_all_subscriptions, + remove_subscription, Subscription, + }, +}; + +pub async fn unsubscribe(app: &App, name: String) -> Result<()> { + let present_subscriptions = get_subscriptions(app).await?; + + if let Some(subscription) = present_subscriptions.0.get(&name) { + remove_subscription(app, subscription).await?; + } else { + bail!("Couldn't find subscription: '{}'", &name); + } + + Ok(()) +} + +pub async fn import<W: AsyncBufRead + AsyncBufReadExt + Unpin>( + app: &App, + reader: W, + force: bool, +) -> Result<()> { + if force { + remove_all_subscriptions(app).await?; + } + + let mut lines = reader.lines(); + while let Some(line) = lines.next_line().await? 
{
+        let url =
+            Url::from_str(&line).with_context(|| format!("Failed to parse '{}' as a URL", line))?;
+        match subscribe(app, None, url)
+            .await
+            .with_context(|| format!("Failed to subscribe to: '{}'", line))
+        {
+            Ok(_) => (),
+            Err(err) => eprintln!(
+                "Error while subscribing to '{}': '{}'",
+                line,
+                err.source().expect("Should have a source")
+            ),
+        }
+    }
+
+    Ok(())
+}
+
+pub async fn subscribe(app: &App, name: Option<String>, url: Url) -> Result<()> {
+    if !(url.as_str().ends_with("videos")
+        || url.as_str().ends_with("streams")
+        || url.as_str().ends_with("shorts")
+        || url.as_str().ends_with("videos/")
+        || url.as_str().ends_with("streams/")
+        || url.as_str().ends_with("shorts/"))
+        && url.as_str().contains("youtube.com")
+    {
+        warn!("Your YouTube URL does not seem to track a channel's playlist (videos, streams, shorts). Adding subscriptions for each of them...");
+
+        let url = Url::parse(&(url.as_str().to_owned() + "/"))
+            .expect("This was a URL, it should stay one");
+
+        if let Some(name) = name {
+            let out: Result<()> = async move {
+                actual_subscribe(
+                    app,
+                    Some(name.clone() + " {Videos}"),
+                    url.join("videos/").expect("Works"),
+                )
+                .await
+                .with_context(|| {
+                    format!("Failed to subscribe to '{}'", name.clone() + " {Videos}")
+                })?;
+
+                actual_subscribe(
+                    app,
+                    Some(name.clone() + " {Streams}"),
+                    url.join("streams/").expect("Works"),
+                )
+                .await
+                .with_context(|| {
+                    format!("Failed to subscribe to '{}'", name.clone() + " {Streams}")
+                })?;
+
+                actual_subscribe(
+                    app,
+                    Some(name.clone() + " {Shorts}"),
+                    url.join("shorts/").expect("Works"),
+                )
+                .await
+                .with_context(|| format!("Failed to subscribe to '{}'", name + " {Shorts}"))?;
+
+                Ok(())
+            }
+            .boxed()
+            .await;
+
+            out?
+        } else {
+            actual_subscribe(app, None, url.join("videos/").expect("Works"))
+                .await
+                .with_context(|| format!("Failed to subscribe to the '{}' variant", "{Videos}"))?;
+
+            actual_subscribe(app, None, url.join("streams/").expect("Works"))
+                .await
+                .with_context(|| format!("Failed to subscribe to the '{}' variant", "{Streams}"))?;
+
+            actual_subscribe(app, None, url.join("shorts/").expect("Works"))
+                .await
+                .with_context(|| format!("Failed to subscribe to the '{}' variant", "{Shorts}"))?;
+        }
+    } else {
+        actual_subscribe(app, name, url).await?;
+    }
+
+    Ok(())
+}
+
+async fn actual_subscribe(app: &App, name: Option<String>, url: Url) -> Result<()> {
+    if !check_url(&url).await? {
+        bail!("The URL ('{}') does not represent a playlist!", &url)
+    };
+
+    let name = if let Some(name) = name {
+        name
+    } else {
+        let yt_opts = match json!( {
+            "playliststart": 1,
+            "playlistend": 10,
+            "noplaylist": false,
+            "extract_flat": "in_playlist",
+        }) {
+            Value::Object(map) => map,
+            _ => unreachable!("This is hardcoded"),
+        };
+
+        let info = yt_dlp::extract_info(&yt_opts, &url, false, false).await?;
+
+        if info._type == Some(InfoType::Playlist) {
+            info.title.expect("This should be some for a playlist")
+        } else {
+            bail!("The URL ('{}') does not represent a playlist!", &url)
+        }
+    };
+
+    let present_subscriptions = get_subscriptions(app).await?;
+
+    if let Some(subs) = present_subscriptions.0.get(&name) {
+        bail!(
+            "The subscription '{}' could not be added, \
+            as another one with the same name ('{}') already exists.
It links to the Url: '{}'", + name, + name, + subs.url + ); + } + + let sub = Subscription { name, url }; + + add_subscription(app, &sub).await?; + + Ok(()) +} diff --git a/yt/src/update/mod.rs b/yt/src/update/mod.rs new file mode 100644 index 0000000..6abb8c4 --- /dev/null +++ b/yt/src/update/mod.rs @@ -0,0 +1,257 @@ +// yt - A fully featured command line YouTube client +// +// Copyright (C) 2024 Benedikt Peetz <benedikt.peetz@b-peetz.de> +// SPDX-License-Identifier: GPL-3.0-or-later +// +// This file is part of Yt. +// +// You should have received a copy of the License along with this program. +// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>. + +use std::{collections::HashMap, process::Stdio, str::FromStr}; + +use anyhow::{Context, Ok, Result}; +use chrono::{DateTime, Utc}; +use log::{error, info, warn}; +use tokio::{ + io::{AsyncBufReadExt, BufReader}, + process::Command, +}; +use url::Url; +use yt_dlp::{unsmuggle_url, wrapper::info_json::InfoJson}; + +use crate::{ + app::App, + storage::{ + subscriptions::{get_subscriptions, Subscription}, + video_database::{ + extractor_hash::ExtractorHash, getters::get_all_hashes, setters::add_video, Video, + VideoStatus, + }, + }, + videos::display::format_video::FormatVideo, +}; + +pub async fn update( + app: &App, + max_backlog: u32, + subs_to_update: Vec<String>, + verbosity: u8, +) -> Result<()> { + let subscriptions = get_subscriptions(app).await?; + let mut back_subs: HashMap<Url, Subscription> = HashMap::new(); + let logging = verbosity > 0; + let log_level = match verbosity { + // 0 => 50, // logging.CRITICAL + 0 => 40, // logging.ERROR + 1 => 30, // logging.WARNING + 2 => 20, // logging.INFO + 3.. => 10, // logging.DEBUG + }; + info!("Passing log_level {} to the update script", log_level); + + let mut urls: Vec<String> = vec![]; + for (name, sub) in subscriptions.0 { + if subs_to_update.contains(&name) || subs_to_update.is_empty() { + urls.push(sub.url.to_string()); + back_subs.insert(sub.url.clone(), sub); + } else { + info!( + "Not updating subscription '{}' as it was not specified", + name + ); + } + } + + // We can get away with not having to re-fetch the hashes every time, as the returned video + // should not contain duplicates. + let hashes = get_all_hashes(app).await?; + + let mut child = Command::new("raw_update.py") + .arg(max_backlog.to_string()) + .arg(urls.len().to_string()) + .arg(log_level.to_string()) + .args(&urls) + .args(hashes.iter().map(|haz| haz.to_string()).collect::<Vec<_>>()) + .stdout(Stdio::piped()) + .stderr(if logging { + Stdio::inherit() + } else { + Stdio::null() + }) + .stdin(Stdio::null()) + .spawn() + .context("Failed to call python3 update_raw")?; + + let mut out = BufReader::new( + child + .stdout + .take() + .expect("Should be able to take child stdout"), + ) + .lines(); + + while let Some(line) = out.next_line().await? { + // use tokio::{fs::File, io::AsyncWriteExt}; + // let mut output = File::create("output.json").await?; + // output.write(line.as_bytes()).await?; + // output.flush().await?; + // output.sync_all().await?; + // drop(output); + + let output_json: HashMap<Url, InfoJson> = + serde_json::from_str(&line).expect("This should be valid json"); + + for (url, value) in output_json { + let sub = back_subs.get(&url).expect("This was stored before"); + process_subscription(app, sub, value, &hashes) + .await + .with_context(|| format!("Failed to process subscription: '{}'", sub.name))? 
+        }
+    }
+
+    let out = child.wait().await?;
+    if !out.success() {
+        error!(
+            "The raw_update.py invocation failed (exit code: {}).",
+            out.code()
+                .map(|f| f.to_string())
+                .unwrap_or("<No exit code>".to_owned())
+        )
+    }
+
+    Ok(())
+}
+
+pub fn video_entry_to_video(entry: InfoJson, sub: Option<&Subscription>) -> Result<Video> {
+    macro_rules! unwrap_option {
+        ($option:expr) => {
+            match $option {
+                Some(x) => x,
+                None => anyhow::bail!(concat!(
+                    "Expected a value, but '",
+                    stringify!($option),
+                    "' is None!"
+                )),
+            }
+        };
+    }
+
+    let publish_date = if let Some(date) = &entry.upload_date {
+        let year: u32 = date
+            .chars()
+            .take(4)
+            .collect::<String>()
+            .parse()
+            .expect("Should work.");
+        let month: u32 = date
+            .chars()
+            .skip(4)
+            .take(2)
+            .collect::<String>()
+            .parse()
+            .expect("Should work");
+        let day: u32 = date
+            .chars()
+            .skip(6)
+            .take(2)
+            .collect::<String>()
+            .parse()
+            .expect("Should work");
+
+        let date_string = format!("{year:04}-{month:02}-{day:02}T00:00:00Z");
+        Some(
+            DateTime::<Utc>::from_str(&date_string)
+                .expect("This should always work")
+                .timestamp(),
+        )
+    } else {
+        warn!(
+            "The video '{}' lacks its upload date!",
+            unwrap_option!(&entry.title)
+        );
+        None
+    };
+
+    let thumbnail_url = match (&entry.thumbnails, &entry.thumbnail) {
+        (None, None) => None,
+        (None, Some(thumbnail)) => Some(thumbnail.to_owned()),
+
+        // TODO: The algorithm is not exactly the best <2024-05-28>
+        (Some(thumbnails), None) => Some(
+            thumbnails
+                .first()
+                .expect("At least one should exist")
+                .url
+                .clone(),
+        ),
+        (Some(_), Some(thumbnail)) => Some(thumbnail.to_owned()),
+    };
+
+    let url = {
+        let smug_url: url::Url = unwrap_option!(entry.webpage_url.clone());
+        unsmuggle_url(smug_url)?
+    };
+
+    let extractor_hash = blake3::hash(unwrap_option!(entry.id).as_bytes());
+
+    let subscription_name = if let Some(sub) = sub {
+        Some(sub.name.clone())
+    } else {
+        if let Some(uploader) = entry.uploader {
+            if entry.webpage_url_domain == Some("youtube.com".to_owned()) {
+                Some(format!("{} - Videos", uploader))
+            } else {
+                Some(uploader.clone())
+            }
+        } else {
+            None
+        }
+    };
+
+    let video = Video {
+        cache_path: None,
+        description: entry.description.clone(),
+        duration: entry.duration,
+        extractor_hash: ExtractorHash::from_hash(extractor_hash),
+        last_status_change: Utc::now().timestamp(),
+        parent_subscription_name: subscription_name,
+        priority: 0,
+        publish_date,
+        status: VideoStatus::Pick,
+        status_change: false,
+        thumbnail_url,
+        title: unwrap_option!(entry.title.clone()),
+        url,
+    };
+    Ok(video)
+}
+
+async fn process_subscription(
+    app: &App,
+    sub: &Subscription,
+    entry: InfoJson,
+    hashes: &[blake3::Hash],
+) -> Result<()> {
+    let video =
+        video_entry_to_video(entry, Some(sub)).context("Failed to parse search entry as Video")?;
+
+    if hashes.contains(&video.extractor_hash.hash()) {
+        // We already stored the video information
+        unreachable!("The python update script should never have provided us a duplicated video");
+    } else {
+        add_video(app, video.clone())
+            .await
+            .with_context(|| format!("Failed to add video to database: '{}'", video.title))?;
+        println!(
+            "{}",
+            (&video
+                .to_formatted_video(app)
+                .await
+                .with_context(|| format!("Failed to format video: '{}'", video.title))?
+ .colorize()) + .to_line_display() + ); + Ok(()) + } +} diff --git a/yt/src/videos/display/format_video.rs b/yt/src/videos/display/format_video.rs new file mode 100644 index 0000000..50646a1 --- /dev/null +++ b/yt/src/videos/display/format_video.rs @@ -0,0 +1,166 @@ +// yt - A fully featured command line YouTube client +// +// Copyright (C) 2024 Benedikt Peetz <benedikt.peetz@b-peetz.de> +// SPDX-License-Identifier: GPL-3.0-or-later +// +// This file is part of Yt. +// +// You should have received a copy of the License along with this program. +// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>. + +use std::fmt::Display; + +pub trait FormatVideo { + type Output; + + fn cache_path(&self) -> Self::Output; + fn description(&self) -> Self::Output; + fn duration(&self) -> Self::Output; + fn extractor_hash(&self) -> Self::Output; + fn last_status_change(&self) -> Self::Output; + fn parent_subscription_name(&self) -> Self::Output; + fn priority(&self) -> Self::Output; + fn publish_date(&self) -> Self::Output; + fn status(&self) -> Self::Output; + fn status_change(&self) -> Self::Output; + fn thumbnail_url(&self) -> Self::Output; + fn title(&self) -> Self::Output; + fn url(&self) -> Self::Output; + fn video_options(&self) -> Self::Output; + + fn to_parts( + &self, + ) -> ( + Self::Output, + Self::Output, + Self::Output, + Self::Output, + Self::Output, + Self::Output, + Self::Output, + Self::Output, + Self::Output, + Self::Output, + Self::Output, + Self::Output, + Self::Output, + Self::Output, + ) { + let cache_path = self.cache_path(); + let description = self.description(); + let duration = self.duration(); + let extractor_hash = self.extractor_hash(); + let last_status_change = self.last_status_change(); + let parent_subscription_name = self.parent_subscription_name(); + let priority = self.priority(); + let publish_date = self.publish_date(); + let status = self.status(); + let status_change = self.status_change(); + let thumbnail_url = self.thumbnail_url(); + let title = self.title(); + let url = self.url(); + let video_options = self.video_options(); + + ( + cache_path, + description, + duration, + extractor_hash, + last_status_change, + parent_subscription_name, + priority, + publish_date, + status, + status_change, + thumbnail_url, + title, + url, + video_options, + ) + } + + fn to_info_display(&self) -> String + where + <Self as FormatVideo>::Output: Display, + { + let ( + cache_path, + description, + duration, + extractor_hash, + last_status_change, + parent_subscription_name, + priority, + publish_date, + status, + status_change, + thumbnail_url, + title, + url, + video_options, + ) = self.to_parts(); + + let status_change = if status_change.to_string().as_str() == "false" { + "currently not changing" + } else if status_change.to_string().as_str() == "true" { + "currently changing" + } else { + unreachable!("This is an formatted boolean"); + }; + + let string = format!( + "\ +{title} ({extractor_hash}) +| -> {cache_path} +| -> {duration} +| -> {parent_subscription_name} +| -> priority: {priority} +| -> {publish_date} +| -> status: {status} since {last_status_change} +| -> {status_change} +| -> {thumbnail_url} +| -> {url} +| -> options: {} +{description}\n", + video_options.to_string().trim() + ); + string + } + + fn to_line_display(&self) -> String + where + Self::Output: Display, + { + let f = format!( + "{} {} {} {} {} {}", + self.status(), + self.extractor_hash(), + self.title(), + self.publish_date(), + self.parent_subscription_name(), + self.duration() + ); + + f + } + + fn 
to_select_file_display(&self) -> String + where + Self::Output: Display, + { + let f = format!( + r#"{}{} {} "{}" "{}" "{}" "{}" "{}"{}"#, + self.status(), + self.video_options(), + self.extractor_hash(), + self.title(), + self.publish_date(), + self.parent_subscription_name(), + self.duration(), + self.url(), + '\n' + ); + + f + } +} diff --git a/yt/src/videos/display/mod.rs b/yt/src/videos/display/mod.rs new file mode 100644 index 0000000..d919dd2 --- /dev/null +++ b/yt/src/videos/display/mod.rs @@ -0,0 +1,314 @@ +// yt - A fully featured command line YouTube client +// +// Copyright (C) 2024 Benedikt Peetz <benedikt.peetz@b-peetz.de> +// SPDX-License-Identifier: GPL-3.0-or-later +// +// This file is part of Yt. +// +// You should have received a copy of the License along with this program. +// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>. + +use std::path::PathBuf; + +use chrono::DateTime; +use format_video::FormatVideo; +use owo_colors::OwoColorize; +use url::Url; + +use crate::{ + app::App, + select::selection_file::duration::Duration, + storage::video_database::{getters::get_video_opts, Video}, +}; + +use anyhow::{Context, Result}; + +pub mod format_video; + +macro_rules! get { + ($value:expr, $key:ident, $name:expr, $code:tt) => { + if let Some(value) = &$value.$key { + $code(value) + } else { + concat!("[No ", $name, "]").to_owned() + } + }; +} + +/// This is identical to a [`FormattedVideo`], but has colorized fields. +pub struct ColorizedFormattedVideo(FormattedVideo); + +impl FormattedVideo { + pub fn colorize(self) -> ColorizedFormattedVideo { + let Self { + cache_path, + description, + duration, + extractor_hash, + last_status_change, + parent_subscription_name, + priority, + publish_date, + status, + status_change, + thumbnail_url, + title, + url, + video_options, + } = self; + + ColorizedFormattedVideo(Self { + cache_path: cache_path.blue().bold().to_string(), + description, + duration: duration.cyan().bold().to_string(), + extractor_hash: extractor_hash.bright_purple().italic().to_string(), + last_status_change: last_status_change.bright_cyan().to_string(), + parent_subscription_name: parent_subscription_name.bright_magenta().to_string(), + priority, + publish_date: publish_date.bright_white().bold().to_string(), + status: status.red().bold().to_string(), + status_change, + thumbnail_url, + title: title.green().bold().to_string(), + url: url.italic().to_string(), + video_options: video_options.bright_green().to_string(), + }) + } +} + +/// This is a version of [`Video`] that has all the fields of the original [`Video`] structure +/// turned to [`String`]s to facilitate displaying it. +/// +/// This structure provides a way to display a [`Video`] in a coherent way, as it enforces to +/// always use the same colors for one field. +#[derive(Debug)] +pub struct FormattedVideo { + cache_path: String, + description: String, + duration: String, + extractor_hash: String, + last_status_change: String, + parent_subscription_name: String, + priority: String, + publish_date: String, + status: String, + status_change: String, + thumbnail_url: String, + title: String, + url: String, + /// This string contains the video options (speed, subtitle_languages, etc.). + /// It already starts with an extra whitespace, when these are not empty. 
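+    /// For example (illustrative values), a video with non-default options ends
+    /// up with something like ` --speed '2.7' --subtitle-langs 'en,de'`, while a
+    /// video that only uses the configured defaults gets an empty string.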
+ video_options: String, +} + +impl Video { + pub async fn to_formatted_video_owned(self, app: &App) -> Result<FormattedVideo> { + Self::to_formatted_video(&self, app).await + } + + pub async fn to_formatted_video(&self, app: &App) -> Result<FormattedVideo> { + fn date_from_stamp(stamp: i64) -> String { + DateTime::from_timestamp(stamp, 0) + .expect("The timestamps should always be valid") + .format("%Y-%m-%d") + .to_string() + } + + let cache_path: String = get!( + self, + cache_path, + "Cache Path", + (|value: &PathBuf| value.to_string_lossy().to_string()) + ); + let description = get!( + self, + description, + "Description", + (|value: &str| value.to_owned()) + ); + let duration = Duration::from(self.duration); + let extractor_hash = self + .extractor_hash + .into_short_hash(app) + .await + .with_context(|| { + format!( + "Failed to format extractor hash, whilst formatting video: '{}'", + self.title + ) + })?; + let last_status_change = date_from_stamp(self.last_status_change); + let parent_subscription_name = get!( + self, + parent_subscription_name, + "author", + (|sub: &str| sub.replace('"', "'")) + ); + let priority = self.priority; + let publish_date = get!( + self, + publish_date, + "release date", + (|date: &i64| date_from_stamp(*date)) + ); + // TODO: We might support `.trim()`ing that, as the extra whitespace could be bad in the + // selection file. <2024-10-07> + let status = self.status.as_command(); + let status_change = self.status_change; + let thumbnail_url = get!( + self, + thumbnail_url, + "thumbnail URL", + (|url: &Url| url.to_string()) + ); + let title = self.title.replace(['"', '„', '”'], "'"); + let url = self.url.as_str().replace('"', "\\\""); + + let video_options = { + let opts = get_video_opts(app, &self.extractor_hash) + .await + .with_context(|| { + format!("Failed to get video options for video: '{}'", self.title) + })? 
+ .to_cli_flags(app); + let opts_white = if !opts.is_empty() { " " } else { "" }; + format!("{}{}", opts_white, opts) + }; + + Ok(FormattedVideo { + cache_path, + description, + duration: duration.to_string(), + extractor_hash: extractor_hash.to_string(), + last_status_change, + parent_subscription_name, + priority: priority.to_string(), + publish_date, + status: status.to_string(), + status_change: status_change.to_string(), + thumbnail_url, + title, + url, + video_options, + }) + } +} + +impl<'a> FormatVideo for &'a FormattedVideo { + type Output = &'a str; + + fn cache_path(&self) -> Self::Output { + &self.cache_path + } + + fn description(&self) -> Self::Output { + &self.description + } + + fn duration(&self) -> Self::Output { + &self.duration + } + + fn extractor_hash(&self) -> Self::Output { + &self.extractor_hash + } + + fn last_status_change(&self) -> Self::Output { + &self.last_status_change + } + + fn parent_subscription_name(&self) -> Self::Output { + &self.parent_subscription_name + } + + fn priority(&self) -> Self::Output { + &self.priority + } + + fn publish_date(&self) -> Self::Output { + &self.publish_date + } + + fn status(&self) -> Self::Output { + &self.status + } + + fn status_change(&self) -> Self::Output { + &self.status_change + } + + fn thumbnail_url(&self) -> Self::Output { + &self.thumbnail_url + } + + fn title(&self) -> Self::Output { + &self.title + } + + fn url(&self) -> Self::Output { + &self.url + } + + fn video_options(&self) -> Self::Output { + &self.video_options + } +} +impl<'a> FormatVideo for &'a ColorizedFormattedVideo { + type Output = &'a str; + + fn cache_path(&self) -> Self::Output { + &self.0.cache_path + } + + fn description(&self) -> Self::Output { + &self.0.description + } + + fn duration(&self) -> Self::Output { + &self.0.duration + } + + fn extractor_hash(&self) -> Self::Output { + &self.0.extractor_hash + } + + fn last_status_change(&self) -> Self::Output { + &self.0.last_status_change + } + + fn parent_subscription_name(&self) -> Self::Output { + &self.0.parent_subscription_name + } + + fn priority(&self) -> Self::Output { + &self.0.priority + } + + fn publish_date(&self) -> Self::Output { + &self.0.publish_date + } + + fn status(&self) -> Self::Output { + &self.0.status + } + + fn status_change(&self) -> Self::Output { + &self.0.status_change + } + + fn thumbnail_url(&self) -> Self::Output { + &self.0.thumbnail_url + } + + fn title(&self) -> Self::Output { + &self.0.title + } + + fn url(&self) -> Self::Output { + &self.0.url + } + + fn video_options(&self) -> Self::Output { + &self.0.video_options + } +} diff --git a/yt/src/videos/mod.rs b/yt/src/videos/mod.rs new file mode 100644 index 0000000..59baa8c --- /dev/null +++ b/yt/src/videos/mod.rs @@ -0,0 +1,66 @@ +// yt - A fully featured command line YouTube client +// +// Copyright (C) 2024 Benedikt Peetz <benedikt.peetz@b-peetz.de> +// SPDX-License-Identifier: GPL-3.0-or-later +// +// This file is part of Yt. +// +// You should have received a copy of the License along with this program. +// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>. 
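The `FormatVideo` trait above funnels every textual rendering of a video through one associated `Output` type: the per-field accessors return `Output`, and the provided `to_info_display`, `to_line_display` and `to_select_file_display` helpers only require `Output: Display`. Implementing the trait for `&'a FormattedVideo` (and the colorized wrapper) with `Output = &'a str` is what lets the same helpers drive both displays. A minimal standalone sketch of that pattern follows; the `FormatEntry`/`Entry` names are hypothetical and not part of this commit.

use std::fmt::Display;

trait FormatEntry {
    type Output;

    fn title(&self) -> Self::Output;
    fn url(&self) -> Self::Output;

    // Provided helper: available whenever the accessor output can be displayed.
    fn to_line(&self) -> String
    where
        Self::Output: Display,
    {
        format!("{} {}", self.title(), self.url())
    }
}

struct Entry {
    title: String,
    url: String,
}

// Implementing on `&Entry` lets `Output` borrow from the entry, mirroring
// `impl<'a> FormatVideo for &'a FormattedVideo` above.
impl<'a> FormatEntry for &'a Entry {
    type Output = &'a str;

    fn title(&self) -> Self::Output {
        &self.title
    }

    fn url(&self) -> Self::Output {
        &self.url
    }
}

fn main() {
    let entry = Entry {
        title: "demo".to_owned(),
        url: "https://example.com".to_owned(),
    };
    println!("{}", (&entry).to_line());
}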
+ +use anyhow::Result; +use display::{format_video::FormatVideo, FormattedVideo}; +use futures::{stream::FuturesUnordered, TryStreamExt}; +use nucleo_matcher::{ + pattern::{CaseMatching, Normalization, Pattern}, + Matcher, +}; + +pub mod display; + +use crate::{ + app::App, + storage::video_database::{getters::get_videos, VideoStatus}, +}; + +pub async fn query(app: &App, limit: Option<usize>, search_query: Option<String>) -> Result<()> { + let all_videos = get_videos(app, VideoStatus::ALL, None).await?; + + // turn one video to a color display, to pre-warm the hash shrinking cache + if let Some(val) = all_videos.first() { + val.to_formatted_video(app).await?; + } + + let limit = limit.unwrap_or(all_videos.len()); + + let all_video_strings: Vec<String> = all_videos + .into_iter() + .take(limit) + .map(|vid| vid.to_formatted_video_owned(app)) + .collect::<FuturesUnordered<_>>() + .try_collect::<Vec<FormattedVideo>>() + .await? + .into_iter() + .map(|vid| (&vid.colorize()).to_line_display()) + .collect(); + + if let Some(query) = search_query { + let mut matcher = Matcher::new(nucleo_matcher::Config::DEFAULT.match_paths()); + + let matches = Pattern::parse( + &query.replace(' ', "\\ "), + CaseMatching::Ignore, + Normalization::Smart, + ) + .match_list(all_video_strings, &mut matcher); + + matches + .iter() + .rev() + .for_each(|(val, key)| println!("{} ({})", val, key)); + } else { + println!("{}", all_video_strings.join("\n")) + } + + Ok(()) +} diff --git a/yt/src/watch/events/mod.rs b/yt/src/watch/events/mod.rs new file mode 100644 index 0000000..41a7772 --- /dev/null +++ b/yt/src/watch/events/mod.rs @@ -0,0 +1,369 @@ +// yt - A fully featured command line YouTube client +// +// Copyright (C) 2024 Benedikt Peetz <benedikt.peetz@b-peetz.de> +// SPDX-License-Identifier: GPL-3.0-or-later +// +// This file is part of Yt. +// +// You should have received a copy of the License along with this program. +// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>. 
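`query` above first renders every video to its colorized one-line form and only then fuzzy-matches the search string against those rendered lines. Below is a minimal sketch of just that matching step, assuming nucleo-matcher 0.3 (the same `Pattern::parse` and `match_list` calls the function relies on); the sample lines are made up.

use nucleo_matcher::{
    pattern::{CaseMatching, Normalization, Pattern},
    Config, Matcher,
};

fn main() {
    // Stand-ins for the one-line displays produced by `to_line_display`.
    let lines = vec![
        "Watch 1a2b3c Some video 2024-10-01 channel-a 12:03".to_owned(),
        "Pick 4d5e6f Another upload 2024-10-02 channel-b 04:10".to_owned(),
    ];

    let mut matcher = Matcher::new(Config::DEFAULT);

    // `query` escapes spaces, so a multi-word search is treated as one atom.
    let pattern = Pattern::parse(
        "another\\ upload",
        CaseMatching::Ignore,
        Normalization::Smart,
    );

    // `match_list` yields `(item, score)` pairs with the best match first,
    // which is why `query` reverses the list before printing.
    for (line, score) in pattern.match_list(lines, &mut matcher) {
        println!("{score:>5} {line}");
    }
}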
+ +use std::{collections::HashMap, env::current_exe, mem, time::Duration}; + +use anyhow::{bail, Result}; +use libmpv2::{ + events::{Event, PlaylistEntryId}, + EndFileReason, Mpv, +}; +use log::{debug, info, warn}; +use tokio::{process::Command, time}; + +use crate::{ + app::App, + comments::get_comments, + storage::video_database::{ + extractor_hash::ExtractorHash, + getters::{get_video_by_hash, get_video_mpv_opts, get_videos}, + setters::{set_state_change, set_video_watched}, + VideoStatus, + }, +}; + +use playlist_handler::PlaylistHandler; + +mod playlist_handler; + +#[derive(Debug)] +pub struct MpvEventHandler { + watch_later_block_list: HashMap<ExtractorHash, ()>, + playlist_handler: PlaylistHandler, +} + +impl MpvEventHandler { + pub fn from_playlist(playlist_cache: HashMap<String, ExtractorHash>) -> Self { + let playlist_handler = PlaylistHandler::from_cache(playlist_cache); + Self { + playlist_handler, + watch_later_block_list: HashMap::new(), + } + } + + /// Checks, whether new videos are ready to be played + pub async fn possibly_add_new_videos( + &mut self, + app: &App, + mpv: &Mpv, + force_message: bool, + ) -> Result<usize> { + let play_things = get_videos(app, &[VideoStatus::Cached], Some(false)).await?; + + // There is nothing to watch + if play_things.is_empty() { + if force_message { + Self::message(mpv, "No new videos available to add", "3000")?; + } + return Ok(0); + } + + let mut blocked_videos = 0; + let current_playlist = self.playlist_handler.playlist_ids(mpv)?; + let play_things = play_things + .into_iter() + .filter(|val| !current_playlist.values().any(|a| a == &val.extractor_hash)) + .filter(|val| { + if self + .watch_later_block_list + .contains_key(&val.extractor_hash) + { + blocked_videos += 1; + false + } else { + true + } + }) + .collect::<Vec<_>>(); + + info!( + "{} videos are cached and will be added to the list to be played ({} are blocked)", + play_things.len(), + blocked_videos + ); + + let num = play_things.len(); + self.playlist_handler.reserve(play_things.len()); + for play_thing in play_things { + debug!("Adding '{}' to playlist.", play_thing.title); + + let orig_cache_path = play_thing.cache_path.expect("Is cached and thus some"); + let cache_path = orig_cache_path.to_str().expect("Should be vaild utf8"); + let fmt_cache_path = format!("\"{}\"", cache_path); + + let args = &[&fmt_cache_path, "append-play"]; + + mpv.execute("loadfile", args)?; + self.playlist_handler + .add(cache_path.to_owned(), play_thing.extractor_hash); + } + + if force_message || num > 0 { + Self::message( + mpv, + format!( + "Added {} videos ({} are marked as watch later)", + num, blocked_videos + ) + .as_str(), + "3000", + )?; + } + Ok(num) + } + + fn message(mpv: &Mpv, message: &str, time: &str) -> Result<()> { + mpv.execute("show-text", &[format!("\"{}\"", message).as_str(), time])?; + Ok(()) + } + + /// Get the hash of the currently playing video. + /// You can specify an offset, which is added to the playlist_position to get, for example, the + /// previous video (-1) or the next video (+1). + /// Beware that setting an offset can cause an property error if it's out of bound. + fn get_cvideo_hash(&mut self, mpv: &Mpv, offset: i64) -> Result<ExtractorHash> { + let playlist_entry_id = { + let playlist_position = { + let raw = mpv.get_property::<i64>("playlist-pos")?; + if raw == -1 { + unreachable!( "This should only be called when a current video exists. 
Current state: '{:#?}'", self); + } else { + (raw + offset) as usize + } + }; + + let raw = + mpv.get_property::<i64>(format!("playlist/{}/id", playlist_position).as_str())?; + PlaylistEntryId::new(raw) + }; + + // debug!("Trying to get playlist entry: '{}'", playlist_entry_id); + + let video_hash = self + .playlist_handler + .playlist_ids(mpv)? + .get(&playlist_entry_id) + .expect("The stored playling index should always be in the playlist") + .to_owned(); + + Ok(video_hash) + } + async fn mark_video_watched(&self, app: &App, hash: &ExtractorHash) -> Result<()> { + let video = get_video_by_hash(app, hash).await?; + debug!("MPV handler will mark video '{}' watched.", video.title); + set_video_watched(app, &video).await?; + Ok(()) + } + + async fn mark_video_inactive( + &mut self, + app: &App, + mpv: &Mpv, + playlist_index: PlaylistEntryId, + ) -> Result<()> { + let current_playlist = self.playlist_handler.playlist_ids(mpv)?; + let video_hash = current_playlist + .get(&playlist_index) + .expect("The video index should always be correctly tracked"); + + set_state_change(app, video_hash, false).await?; + Ok(()) + } + async fn mark_video_active( + &mut self, + app: &App, + mpv: &Mpv, + playlist_index: PlaylistEntryId, + ) -> Result<()> { + let current_playlist = self.playlist_handler.playlist_ids(mpv)?; + let video_hash = current_playlist + .get(&playlist_index) + .expect("The video index should always be correctly tracked"); + + set_state_change(app, video_hash, true).await?; + Ok(()) + } + + /// Apply the options set with e.g. `watch --speed=<speed>` + async fn apply_options(&self, app: &App, mpv: &Mpv, hash: &ExtractorHash) -> Result<()> { + let options = get_video_mpv_opts(app, hash).await?; + + mpv.set_property("speed", options.playback_speed)?; + Ok(()) + } + + /// This also returns the hash of the current video + fn remove_cvideo_from_playlist(&mut self, mpv: &Mpv) -> Result<ExtractorHash> { + let hash = self.get_cvideo_hash(mpv, 0)?; + mpv.execute("playlist-remove", &["current"])?; + Ok(hash) + } + + /// Check if the playback queue is empty + pub async fn check_idle(&mut self, app: &App, mpv: &Mpv) -> Result<bool> { + if mpv.get_property::<bool>("idle-active")? { + warn!("There is nothing to watch yet. Will idle, until something is available"); + let number_of_new_videos = self.possibly_add_new_videos(app, mpv, false).await?; + + if number_of_new_videos == 0 { + time::sleep(Duration::from_secs(10)).await; + Ok(true) + } else { + Ok(false) + } + } else { + Ok(false) + } + } + + /// This will return [`true`], if the event handling should be stopped + pub async fn handle_mpv_event<'a>( + &mut self, + app: &App, + mpv: &Mpv, + event: Event<'a>, + ) -> Result<bool> { + match event { + Event::EndFile(r) => match r.reason { + EndFileReason::Eof => { + info!("Mpv reached eof of current video. Marking it inactive."); + + self.mark_video_inactive(app, mpv, r.playlist_entry_id) + .await?; + } + EndFileReason::Stop => { + // This reason is incredibly ambiguous. It _both_ means actually pausing a + // video and going to the next one in the playlist. + // Oh, and it's also called, when a video is removed from the playlist (at + // least via "playlist-remove current") + info!("Paused video (or went to next playlist entry); Marking it inactive"); + + self.mark_video_inactive(app, mpv, r.playlist_entry_id) + .await?; + } + EndFileReason::Quit => { + info!("Mpv quit. 
Exiting playback"); + + // draining the playlist is okay, as mpv is done playing + let mut handler = mem::take(&mut self.playlist_handler); + let videos = handler.playlist_ids(mpv)?; + for hash in videos.values() { + self.mark_video_watched(app, hash).await?; + set_state_change(app, hash, false).await?; + } + return Ok(true); + } + EndFileReason::Error => { + unreachable!("This will be raised as a separate error") + } + EndFileReason::Redirect => { + todo!("We probably need to handle this somehow"); + } + }, + Event::StartFile(entry_id) => { + self.possibly_add_new_videos(app, mpv, false).await?; + + // We don't need to check, whether other videos are still active, as they should + // have been marked inactive in the `Stop` handler. + self.mark_video_active(app, mpv, entry_id).await?; + let hash = self.get_cvideo_hash(mpv, 0)?; + self.apply_options(app, mpv, &hash).await?; + } + Event::ClientMessage(a) => { + debug!("Got Client Message event: '{}'", a.join(" ")); + + match a.as_slice() { + &["yt-comments-external"] => { + let binary = current_exe().expect("A current exe should exist"); + + let status = Command::new("riverctl") + .args(["focus-output", "next"]) + .status() + .await?; + if !status.success() { + bail!("focusing the next output failed!"); + } + + let status = Command::new("alacritty") + .args([ + "--title", + "floating please", + "--command", + binary.to_str().expect("Should be valid unicode"), + "--db-path", + app.config + .paths + .database_path + .to_str() + .expect("This should be convertible?"), + "comments", + ]) + .status() + .await?; + if !status.success() { + bail!("Falied to start `yt comments`"); + } + + let status = Command::new("riverctl") + .args(["focus-output", "next"]) + .status() + .await?; + if !status.success() { + bail!("focusing the next output failed!"); + } + } + &["yt-comments-local"] => { + let comments: String = get_comments(app) + .await? + .render(false) + .replace("\"", "") + .replace("'", "") + .chars() + .take(app.config.watch.local_comments_length) + .collect(); + + Self::message(mpv, &comments, "6000")?; + } + &["yt-description"] => { + // let description = description(app).await?; + Self::message(mpv, "<YT Description>", "6000")?; + } + &["yt-mark-watch-later"] => { + mpv.execute("write-watch-later-config", &[])?; + + let hash = self.remove_cvideo_from_playlist(mpv)?; + assert_eq!( + self.watch_later_block_list.insert(hash, ()), + None, + "A video should not be blocked *and* in the playlist" + ); + + Self::message(mpv, "Marked the video to be watched later", "3000")?; + } + &["yt-mark-done-and-go-next"] => { + let cvideo_hash = self.remove_cvideo_from_playlist(mpv)?; + self.mark_video_watched(app, &cvideo_hash).await?; + + Self::message(mpv, "Marked the video watched", "3000")?; + } + &["yt-check-new-videos"] => { + self.possibly_add_new_videos(app, mpv, true).await?; + } + other => { + debug!("Unknown message: {}", other.join(" ")) + } + } + } + _ => {} + } + + Ok(false) + } +} diff --git a/yt/src/watch/events/playlist_handler.rs b/yt/src/watch/events/playlist_handler.rs new file mode 100644 index 0000000..0933856 --- /dev/null +++ b/yt/src/watch/events/playlist_handler.rs @@ -0,0 +1,94 @@ +// yt - A fully featured command line YouTube client +// +// Copyright (C) 2024 Benedikt Peetz <benedikt.peetz@b-peetz.de> +// SPDX-License-Identifier: GPL-3.0-or-later +// +// This file is part of Yt. +// +// You should have received a copy of the License along with this program. +// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>. 
+ +use std::collections::HashMap; + +use anyhow::Result; +use libmpv2::{events::PlaylistEntryId, mpv_node::MpvNode, Mpv}; + +use crate::storage::video_database::extractor_hash::ExtractorHash; + +#[derive(Debug, Default)] +pub struct PlaylistHandler { + /// A map of the original file paths to the videos extractor hashes. + /// Used to get the extractor hash from a video returned by mpv + playlist_cache: HashMap<String, ExtractorHash>, + + /// A map of the playlist_entry_id field to their corresponding extractor hashes. + playlist_ids: HashMap<PlaylistEntryId, ExtractorHash>, +} +impl PlaylistHandler { + pub fn from_cache(cache: HashMap<String, ExtractorHash>) -> Self { + Self { + playlist_cache: cache, + playlist_ids: HashMap::new(), + } + } + + pub fn reserve(&mut self, len: usize) { + self.playlist_cache.reserve(len) + } + pub fn add(&mut self, cache_path: String, extractor_hash: ExtractorHash) { + assert_eq!( + self.playlist_cache.insert(cache_path, extractor_hash), + None, + "Only new video should ever be added" + ); + } + + pub fn playlist_ids(&mut self, mpv: &Mpv) -> Result<&HashMap<PlaylistEntryId, ExtractorHash>> { + let mpv_playlist: Vec<(String, PlaylistEntryId)> = match mpv.get_property("playlist")? { + MpvNode::ArrayIter(array) => array + .map(|val| match val { + MpvNode::MapIter(map) => { + struct BuildPlaylistEntry { + filename: Option<String>, + id: Option<PlaylistEntryId>, + } + let mut entry = BuildPlaylistEntry { + filename: None, + id: None, + }; + + map.for_each(|(key, value)| match key.as_str() { + "filename" => { + entry.filename = Some(value.str().expect("work").to_owned()) + } + "id" => { + entry.id = Some(PlaylistEntryId::new(value.i64().expect("Works"))) + } + _ => (), + }); + (entry.filename.expect("is some"), entry.id.expect("is some")) + } + _ => unreachable!(), + }) + .collect(), + _ => unreachable!(), + }; + + let mut playlist: HashMap<PlaylistEntryId, ExtractorHash> = + HashMap::with_capacity(mpv_playlist.len()); + for (path, key) in mpv_playlist { + let hash = self + .playlist_cache + .get(&path) + .expect("All path should also be stored in the cache") + .to_owned(); + playlist.insert(key, hash); + } + + for (id, hash) in playlist { + self.playlist_ids.entry(id).or_insert(hash); + } + + Ok(&self.playlist_ids) + } +} diff --git a/yt/src/watch/mod.rs b/yt/src/watch/mod.rs new file mode 100644 index 0000000..3bcf1fc --- /dev/null +++ b/yt/src/watch/mod.rs @@ -0,0 +1,117 @@ +// yt - A fully featured command line YouTube client +// +// Copyright (C) 2024 Benedikt Peetz <benedikt.peetz@b-peetz.de> +// SPDX-License-Identifier: GPL-3.0-or-later +// +// This file is part of Yt. +// +// You should have received a copy of the License along with this program. +// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>. + +use std::collections::HashMap; + +use anyhow::Result; +use events::MpvEventHandler; +use libmpv2::{events::EventContext, Mpv}; +use log::{debug, info, warn}; + +use crate::{ + app::App, + cache::maintain, + storage::video_database::{extractor_hash::ExtractorHash, getters::get_videos, VideoStatus}, +}; + +pub mod events; + +pub async fn watch(app: &App) -> Result<()> { + maintain(app, false).await?; + + // set some default values, to make things easier (these can be overridden by the config file, + // which we load later) + let mpv = Mpv::with_initializer(|mpv| { + // Enable default key bindings, so the user can actually interact with + // the player (and e.g. close the window). 
+ mpv.set_property("input-default-bindings", "yes")?; + mpv.set_property("input-vo-keyboard", "yes")?; + + // Show the on screen controller. + mpv.set_property("osc", "yes")?; + + // Don't automatically advance to the next video (or exit the player) + mpv.set_option("keep-open", "always")?; + Ok(()) + })?; + + let config_path = &app.config.paths.mpv_config_path; + if config_path.try_exists()? { + info!("Found mpv.conf at '{}'!", config_path.display()); + mpv.execute( + "load-config-file", + &[config_path.to_str().expect("This should be utf8-able")], + )?; + } else { + warn!( + "Did not find a mpv.conf file at '{}'", + config_path.display() + ); + } + + let input_path = &app.config.paths.mpv_input_path; + if input_path.try_exists()? { + info!("Found mpv.input.conf at '{}'!", input_path.display()); + mpv.execute( + "load-input-conf", + &[input_path.to_str().expect("This should be utf8-able")], + )?; + } else { + warn!( + "Did not find a mpv.input.conf file at '{}'", + input_path.display() + ); + } + + let mut ev_ctx = EventContext::new(mpv.ctx); + ev_ctx.disable_deprecated_events()?; + + let play_things = get_videos(app, &[VideoStatus::Cached], Some(false)).await?; + info!( + "{} videos are cached and ready to be played", + play_things.len() + ); + + let mut playlist_cache: HashMap<String, ExtractorHash> = + HashMap::with_capacity(play_things.len()); + + for play_thing in play_things { + debug!("Adding '{}' to playlist.", play_thing.title); + + let orig_cache_path = play_thing.cache_path.expect("Is cached and thus some"); + let cache_path = orig_cache_path.to_str().expect("Should be vaild utf8"); + let fmt_cache_path = format!("\"{}\"", cache_path); + + let args = &[&fmt_cache_path, "append-play"]; + + mpv.execute("loadfile", args)?; + + playlist_cache.insert(cache_path.to_owned(), play_thing.extractor_hash); + } + + let mut mpv_event_handler = MpvEventHandler::from_playlist(playlist_cache); + loop { + while mpv_event_handler.check_idle(app, &mpv).await? {} + + if let Some(ev) = ev_ctx.wait_event(600.) { + match ev { + Ok(event) => { + debug!("Mpv event triggered: {:#?}", event); + if mpv_event_handler.handle_mpv_event(app, &mpv, event).await? { + break; + } + } + Err(e) => debug!("Mpv Event errored: {}", e), + } + } + } + + Ok(()) +} |