path: root/crates
Diffstat (limited to 'crates')
-rw-r--r--  crates/fmt/Cargo.toml | 30
-rw-r--r--  crates/fmt/LICENSE | 18
-rw-r--r--  crates/fmt/LICENSE.license | 10
-rw-r--r--  crates/fmt/src/fmt.rs | 137
-rw-r--r--  crates/fmt/src/linebreak.rs | 520
-rw-r--r--  crates/fmt/src/parasplit.rs | 629
-rw-r--r--  crates/libmpv2/CHANGELOG.md | 8
-rw-r--r--  crates/libmpv2/Cargo.toml | 1
-rw-r--r--  crates/libmpv2/examples/events.rs | 34
-rw-r--r--  crates/libmpv2/examples/opengl.rs | 2
-rw-r--r--  crates/libmpv2/libmpv2-sys/Cargo.toml | 2
-rw-r--r--  crates/libmpv2/libmpv2-sys/build.rs | 4
-rw-r--r--  crates/libmpv2/src/lib.rs | 4
-rw-r--r--  crates/libmpv2/src/mpv.rs | 41
-rw-r--r--  crates/libmpv2/src/mpv/errors.rs | 95
-rw-r--r--  crates/libmpv2/src/mpv/events.rs | 52
-rw-r--r--  crates/libmpv2/src/mpv/protocol.rs | 135
-rw-r--r--  crates/libmpv2/src/mpv/raw_error_warning.txt | 5
-rw-r--r--  crates/libmpv2/src/mpv/raw_error_warning.txt.license | 9
-rw-r--r--  crates/libmpv2/src/mpv/render.rs | 52
-rw-r--r--  crates/termsize/.gitignore | 12
-rw-r--r--  crates/termsize/Cargo.toml | 36
-rw-r--r--  crates/termsize/LICENSE | 20
-rw-r--r--  crates/termsize/LICENSE.license | 9
-rw-r--r--  crates/termsize/README.md | 51
-rw-r--r--  crates/termsize/src/lib.rs | 52
-rw-r--r--  crates/termsize/src/nix.rs | 100
-rw-r--r--  crates/termsize/src/other.rs | 14
-rw-r--r--  crates/termsize/src/win.rs | 52
-rw-r--r--  crates/yt/Cargo.toml | 66
-rw-r--r--  crates/yt/src/ansi_escape_codes.rs | 36
-rw-r--r--  crates/yt/src/app.rs | 50
-rw-r--r--  crates/yt/src/cache/mod.rs | 105
-rw-r--r--  crates/yt/src/cli.rs | 504
-rw-r--r--  crates/yt/src/comments/comment.rs | 152
-rw-r--r--  crates/yt/src/comments/description.rs | 46
-rw-r--r--  crates/yt/src/comments/display.rs | 118
-rw-r--r--  crates/yt/src/comments/mod.rs | 167
-rw-r--r--  crates/yt/src/comments/output.rs | 53
-rw-r--r--  crates/yt/src/config/default.rs | 110
-rw-r--r--  crates/yt/src/config/definitions.rs | 67
-rw-r--r--  crates/yt/src/config/file_system.rs | 120
-rw-r--r--  crates/yt/src/config/mod.rs | 76
-rw-r--r--  crates/yt/src/constants.rs | 12
-rw-r--r--  crates/yt/src/download/download_options.rs | 118
-rw-r--r--  crates/yt/src/download/mod.rs | 369
-rw-r--r--  crates/yt/src/download/progress_hook.rs | 198
-rw-r--r--  crates/yt/src/main.rs | 307
-rw-r--r--  crates/yt/src/select/cmds/add.rs | 193
-rw-r--r--  crates/yt/src/select/cmds/mod.rs | 113
-rw-r--r--  crates/yt/src/select/mod.rs | 321
-rw-r--r--  crates/yt/src/select/selection_file/duration.rs | 246
-rw-r--r--  crates/yt/src/select/selection_file/help.str | 12
-rw-r--r--  crates/yt/src/select/selection_file/help.str.license | 10
-rw-r--r--  crates/yt/src/select/selection_file/mod.rs | 42
-rw-r--r--  crates/yt/src/status/mod.rs | 130
-rw-r--r--  crates/yt/src/storage/migrate/mod.rs | 279
-rw-r--r--  crates/yt/src/storage/migrate/sql/0_Empty_to_Zero.sql | 72
-rw-r--r--  crates/yt/src/storage/migrate/sql/1_Zero_to_One.sql | 28
-rw-r--r--  crates/yt/src/storage/migrate/sql/2_One_to_Two.sql | 11
-rw-r--r--  crates/yt/src/storage/migrate/sql/3_Two_to_Three.sql | 85
-rw-r--r--  crates/yt/src/storage/mod.rs (renamed from crates/yt_dlp/src/wrapper/mod.rs) | 6
-rw-r--r--  crates/yt/src/storage/subscriptions.rs | 141
-rw-r--r--  crates/yt/src/storage/video_database/downloader.rs | 130
-rw-r--r--  crates/yt/src/storage/video_database/extractor_hash.rs | 163
-rw-r--r--  crates/yt/src/storage/video_database/get/mod.rs | 307
-rw-r--r--  crates/yt/src/storage/video_database/get/playlist/iterator.rs | 101
-rw-r--r--  crates/yt/src/storage/video_database/get/playlist/mod.rs | 167
-rw-r--r--  crates/yt/src/storage/video_database/mod.rs | 329
-rw-r--r--  crates/yt/src/storage/video_database/notify.rs | 77
-rw-r--r--  crates/yt/src/storage/video_database/set/mod.rs | 327
-rw-r--r--  crates/yt/src/storage/video_database/set/playlist.rs | 101
-rw-r--r--  crates/yt/src/subscribe/mod.rs | 193
-rw-r--r--  crates/yt/src/unreachable.rs | 50
-rw-r--r--  crates/yt/src/update/mod.rs | 204
-rw-r--r--  crates/yt/src/update/updater.rs | 194
-rw-r--r--  crates/yt/src/version/mod.rs | 52
-rw-r--r--  crates/yt/src/videos/display/format_video.rs | 94
-rw-r--r--  crates/yt/src/videos/display/mod.rs | 229
-rw-r--r--  crates/yt/src/videos/mod.rs | 54
-rw-r--r--  crates/yt/src/watch/mod.rs | 178
-rw-r--r--  crates/yt/src/watch/playlist.rs | 99
-rw-r--r--  crates/yt/src/watch/playlist_handler/client_messages/mod.rs | 99
-rw-r--r--  crates/yt/src/watch/playlist_handler/mod.rs | 342
-rw-r--r--  crates/yt_dlp/Cargo.toml | 22
-rw-r--r--  crates/yt_dlp/README.md | 2
-rw-r--r--  crates/yt_dlp/src/duration.rs | 78
-rw-r--r--  crates/yt_dlp/src/info_json.rs | 60
-rw-r--r--  crates/yt_dlp/src/lib.rs | 701
-rw-r--r--  crates/yt_dlp/src/logging.rs | 123
-rw-r--r--  crates/yt_dlp/src/options.rs | 286
-rw-r--r--  crates/yt_dlp/src/package_hacks/mod.rs | 11
-rw-r--r--  crates/yt_dlp/src/package_hacks/urllib3.rs | 35
-rw-r--r--  crates/yt_dlp/src/package_hacks/urllib3_polyfill.py (renamed from crates/yt_dlp/.cargo/config.toml) | 7
-rw-r--r--  crates/yt_dlp/src/post_processors/dearrow.rs | 184
-rw-r--r--  crates/yt_dlp/src/post_processors/mod.rs | 123
-rw-r--r--  crates/yt_dlp/src/progress_hook.rs | 54
-rw-r--r--  crates/yt_dlp/src/python_error.rs | 116
-rw-r--r--  crates/yt_dlp/src/tests.rs | 85
-rw-r--r--  crates/yt_dlp/src/wrapper/info_json.rs | 556
-rw-r--r--  crates/yt_dlp/src/wrapper/yt_dlp_options.rs | 62
101 files changed, 11056 insertions, 1436 deletions
diff --git a/crates/fmt/Cargo.toml b/crates/fmt/Cargo.toml
new file mode 100644
index 0000000..f3cf4ad
--- /dev/null
+++ b/crates/fmt/Cargo.toml
@@ -0,0 +1,30 @@
+# yt - A fully featured command line YouTube client
+#
+# Copyright (C) 2025 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+# Copyright (C) 2025 uutils developers
+# SPDX-License-Identifier: MIT
+#
+# This file is part of Yt.
+#
+# You should have received a copy of the License along with this program.
+# If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
+
+[package]
+name = "uu_fmt"
+authors = ["uutils developers", "Benedikt Peetz <benedikt.peetz@b-peetz.de>"]
+license = "MIT"
+description = "A fork of the uutils fmt tool. This fork is a library instead of a binary."
+version.workspace = true
+edition.workspace = true
+repository.workspace = true
+rust-version.workspace = true
+publish = false
+
+[lib]
+path = "src/fmt.rs"
+
+[dependencies]
+unicode-width = "0.2.1"
+
+[lints]
+workspace = true
diff --git a/crates/fmt/LICENSE b/crates/fmt/LICENSE
new file mode 100644
index 0000000..21bd444
--- /dev/null
+++ b/crates/fmt/LICENSE
@@ -0,0 +1,18 @@
+Copyright (c) uutils developers
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of
+this software and associated documentation files (the "Software"), to deal in
+the Software without restriction, including without limitation the rights to
+use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software is furnished to do so,
+subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
+FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
+COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
+IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
+CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/crates/fmt/LICENSE.license b/crates/fmt/LICENSE.license
new file mode 100644
index 0000000..6cee99d
--- /dev/null
+++ b/crates/fmt/LICENSE.license
@@ -0,0 +1,10 @@
+yt - A fully featured command line YouTube client
+
+Copyright (C) 2025 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+Copyright (C) 2025 uutils developers
+SPDX-License-Identifier: MIT
+
+This file is part of Yt.
+
+You should have received a copy of the License along with this program.
+If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
diff --git a/crates/fmt/src/fmt.rs b/crates/fmt/src/fmt.rs
new file mode 100644
index 0000000..3067bea
--- /dev/null
+++ b/crates/fmt/src/fmt.rs
@@ -0,0 +1,137 @@
+// yt - A fully featured command line YouTube client
+//
+// Copyright (C) 2025 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+// Copyright (C) 2025 uutils developers
+// SPDX-License-Identifier: MIT
+//
+// This file is part of Yt.
+//
+// You should have received a copy of the License along with this program.
+// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
+
+// This file is part of the uutils coreutils package.
+//
+// For the full copyright and license information, please view the LICENSE
+// file that was distributed with this source code.
+
+use std::fmt::Write;
+
+use linebreak::break_lines;
+use parasplit::ParagraphStream;
+
+mod linebreak;
+mod parasplit;
+
+#[derive(Debug)]
+#[allow(clippy::struct_excessive_bools)]
+pub struct FmtOptions {
+    /// First and second line of paragraph
+    /// may have different indentations, in which
+    /// case the first line's indentation is preserved,
+    /// and each subsequent line's indentation matches the second line.
+    pub crown_margin: bool,
+
+    /// Like [`crown_margin`], except that the first and second line of a paragraph *must*
+    /// have different indentation or they are treated as separate paragraphs.
+    pub tagged_paragraph: bool,
+
+    /// Attempt to detect and preserve mail headers in the input.
+    /// Be careful when combining this with [`prefix`].
+    pub mail: bool,
+
+    /// Split lines only, do not reflow.
+    pub split_only: bool,
+
+    /// Insert exactly one space between words, and two between sentences.
+    /// Sentence breaks in the input are detected as [?!.] followed by two spaces or a newline;
+    /// other punctuation is not interpreted as a sentence break.
+    pub uniform: bool,
+
+    /// Reformat only lines beginning with PREFIX, reattaching PREFIX to reformatted lines.
+    /// Unless [`exact_prefix`] is specified, leading whitespace will be ignored when matching PREFIX.
+    pub prefix: Option<String>,
+
+    /// Do not reformat lines beginning with ``ANTI_PREFIX``.
+    /// Unless [`exact_anti_prefix`] is specified, leading whitespace will be ignored when matching ``ANTI_PREFIX``.
+    pub anti_prefix: Option<String>,
+
+    /// [`prefix`] must match at the beginning of the line with no preceding whitespace.
+    pub exact_prefix: bool,
+
+    /// [`anti_prefix`] must match at the beginning of the line with no preceding whitespace.
+    pub exact_anti_prefix: bool,
+
+    /// Fill output lines up to a maximum of WIDTH columns, default 75.
+    pub width: usize,
+
+    /// Goal width, default of 93% of WIDTH.
+    /// Must be less than or equal to WIDTH.
+    pub goal: usize,
+
+    /// Break lines more quickly at the expense of a potentially more ragged appearance.
+    pub quick: bool,
+
+    /// Treat tabs as TABWIDTH spaces for determining line length, default 8.
+    /// Note that this is used only for calculating line lengths; tabs are preserved in the output.
+    pub tabwidth: usize,
+}
+
+impl FmtOptions {
+    #[must_use]
+    #[allow(clippy::cast_sign_loss)]
+    #[allow(clippy::cast_possible_truncation)]
+    #[allow(clippy::cast_precision_loss)]
+    pub fn new(width: Option<usize>, goal: Option<usize>, tabwidth: Option<usize>) -> Self {
+        // by default, goal is 93% of width
+        const DEFAULT_GOAL_TO_WIDTH_RATIO: f64 = 0.93;
+        const DEFAULT_WIDTH: usize = 75;
+
+        FmtOptions {
+            crown_margin: false,
+            tagged_paragraph: false,
+            mail: false,
+            split_only: false,
+            uniform: false,
+            prefix: None,
+            anti_prefix: None,
+            exact_prefix: false,
+            exact_anti_prefix: false,
+            width: width.unwrap_or(DEFAULT_WIDTH),
+            goal: goal.unwrap_or(
+                ((width.unwrap_or(DEFAULT_WIDTH) as f64) * DEFAULT_GOAL_TO_WIDTH_RATIO).floor()
+                    as usize,
+            ),
+            quick: false,
+            tabwidth: tabwidth.unwrap_or(8),
+        }
+    }
+}
+
+/// Process text and format it according to the provided options.
+///
+/// # Arguments
+///
+/// * `text` - The text to process.
+/// * `fmt_opts` - A reference to a [`FmtOptions`] structure containing the formatting options.
+///
+/// # Returns
+///
+/// The formatted [`String`].
+#[must_use]
+pub fn process_text(text: &str, fmt_opts: &FmtOptions) -> String {
+    let mut output = String::new();
+
+    let p_stream = ParagraphStream::new(fmt_opts, text);
+    for para_result in p_stream {
+        match para_result {
+            Err(s) => {
+                output.push_str(&s);
+                output.push('\n');
+            }
+            Ok(para) => write!(output, "{}", break_lines(&para, fmt_opts))
+                .expect("This is in-memory. It should not fail"),
+        }
+    }
+
+    output
+}
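For orientation, the public surface this file adds is `FmtOptions` plus `process_text`. The following is a minimal usage sketch, illustrative only and not part of the patch; it assumes the library is depended on under its package name `uu_fmt` and uses only the items defined above.

use uu_fmt::{FmtOptions, process_text};

fn main() {
    // Wrap at 40 columns; goal and tab width fall back to their defaults
    // (93% of width and 8, respectively).
    let mut opts = FmtOptions::new(Some(40), None, None);
    opts.uniform = true; // exactly one space between words, two between sentences

    let text = "The quick brown fox   jumps over the lazy dog.  It was not amused.";
    print!("{}", process_text(text, &opts));
}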
diff --git a/crates/fmt/src/linebreak.rs b/crates/fmt/src/linebreak.rs
new file mode 100644
index 0000000..b1dc6fa
--- /dev/null
+++ b/crates/fmt/src/linebreak.rs
@@ -0,0 +1,520 @@
+// yt - A fully featured command line YouTube client
+//
+// Copyright (C) 2025 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+// Copyright (C) 2025 uutils developers
+// SPDX-License-Identifier: MIT
+//
+// This file is part of Yt.
+//
+// You should have received a copy of the License along with this program.
+// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
+
+// This file is part of the uutils coreutils package.
+//
+// For the full copyright and license information, please view the LICENSE
+// file that was distributed with this source code.
+
+use std::fmt::Write;
+use std::{cmp, mem};
+
+use crate::FmtOptions;
+use crate::parasplit::{ParaWords, Paragraph, WordInfo};
+
+struct BreakArgs<'a> {
+    opts: &'a FmtOptions,
+    init_len: usize,
+    indent_str: &'a str,
+    indent_len: usize,
+    uniform: bool,
+    output: String,
+}
+
+impl BreakArgs<'_> {
+    fn compute_width(&self, winfo: &WordInfo<'_>, position_n: usize, fresh: bool) -> usize {
+        if fresh {
+            0
+        } else {
+            let post = winfo.after_tab;
+            match winfo.before_tab {
+                None => post,
+                Some(pre) => {
+                    post + ((pre + position_n) / self.opts.tabwidth + 1) * self.opts.tabwidth
+                        - position_n
+                }
+            }
+        }
+    }
+}
+
+pub(super) fn break_lines(para: &Paragraph, opts: &FmtOptions) -> String {
+    let mut output = String::new();
+
+    // indent
+    let p_indent = &para.indent_str;
+    let p_indent_len = para.indent_len;
+
+    // words
+    let p_words = ParaWords::new(opts, para);
+    let mut p_words_words = p_words.words();
+
+    // the first word will *always* appear on the first line
+    // make sure of this here
+    let Some(winfo) = p_words_words.next() else {
+        return "\n".to_owned();
+    };
+
+    // print the init, if it exists, and get its length
+    let p_init_len = winfo.word_nchars
+        + if opts.crown_margin || opts.tagged_paragraph {
+            // handle "init" portion
+            output.push_str(&para.init_str);
+            para.init_len
+        } else if !para.mail_header {
+            // for non-(crown, tagged) that's the same as a normal indent
+            output.push_str(p_indent);
+            p_indent_len
+        } else {
+            // except that mail headers get no indent at all
+            0
+        };
+
+    // write first word after writing init
+    write!(output, "{}", winfo.word).expect("Works");
+
+    // does this paragraph require uniform spacing?
+    let uniform = para.mail_header || opts.uniform;
+
+    let mut break_args = BreakArgs {
+        opts,
+        init_len: p_init_len,
+        indent_str: p_indent,
+        indent_len: p_indent_len,
+        uniform,
+        output,
+    };
+
+    if opts.quick || para.mail_header {
+        break_simple(p_words_words, &mut break_args);
+    } else {
+        break_knuth_plass(p_words_words, &mut break_args);
+    };
+
+    break_args.output
+}
+
+// break_simple implements a "greedy" breaking algorithm: print words until
+// maxlength would be exceeded, then print a linebreak and indent and continue.
+fn break_simple<'a, T: Iterator<Item = &'a WordInfo<'a>>>(iter: T, args: &mut BreakArgs<'a>) {
+    iter.fold((args.init_len, false), |(l, prev_punct), winfo| {
+        accum_words_simple(args, l, prev_punct, winfo)
+    });
+    args.output.push('\n');
+}
+
+fn accum_words_simple<'a>(
+    args: &mut BreakArgs<'a>,
+    l: usize,
+    prev_punct: bool,
+    winfo: &'a WordInfo<'a>,
+) -> (usize, bool) {
+    // compute the length of this word, considering how tabs will expand at this position on the line
+    let wlen = winfo.word_nchars + args.compute_width(winfo, l, false);
+
+    let slen = compute_slen(
+        args.uniform,
+        winfo.new_line,
+        winfo.sentence_start,
+        prev_punct,
+    );
+
+    if l + wlen + slen > args.opts.width {
+        write_newline(args.indent_str, &mut args.output);
+        write_with_spaces(&winfo.word[winfo.word_start..], 0, &mut args.output);
+        (args.indent_len + winfo.word_nchars, winfo.ends_punct)
+    } else {
+        write_with_spaces(winfo.word, slen, &mut args.output);
+        (l + wlen + slen, winfo.ends_punct)
+    }
+}
+
+// break_knuth_plass implements an "optimal" breaking algorithm in the style of
+//    Knuth, D.E., and Plass, M.F. "Breaking Paragraphs into Lines." in Software,
+//    Practice and Experience. Vol. 11, No. 11, November 1981.
+//    http://onlinelibrary.wiley.com/doi/10.1002/spe.4380111102/pdf
+#[allow(trivial_casts)]
+fn break_knuth_plass<'a, T: Clone + Iterator<Item = &'a WordInfo<'a>>>(
+    mut iter: T,
+    args: &mut BreakArgs<'a>,
+) {
+    // run the algorithm to get the breakpoints
+    let breakpoints = find_kp_breakpoints(iter.clone(), args);
+
+    // iterate through the breakpoints (note that breakpoints is in reverse break order, so we .rev() it)
+    let result: (bool, bool) = breakpoints.iter().rev().fold(
+        (false, false),
+        |(mut prev_punct, mut fresh), &(next_break, break_before)| {
+            if fresh {
+                write_newline(args.indent_str, &mut args.output);
+            }
+            // at each breakpoint, keep emitting words until we find the word matching this breakpoint
+            for winfo in &mut iter {
+                let (slen, word) = slice_if_fresh(
+                    fresh,
+                    winfo.word,
+                    winfo.word_start,
+                    args.uniform,
+                    winfo.new_line,
+                    winfo.sentence_start,
+                    prev_punct,
+                );
+                fresh = false;
+                prev_punct = winfo.ends_punct;
+
+                // We find identical breakpoints here by comparing addresses of the references.
+                // This is OK because the backing vector is not mutating once we are linebreaking.
+                let winfo_ptr = winfo as *const _;
+                let next_break_ptr = next_break as *const _;
+                if winfo_ptr == next_break_ptr {
+                    // OK, we found the matching word
+                    if break_before {
+                        write_newline(args.indent_str, &mut args.output);
+                        write_with_spaces(&winfo.word[winfo.word_start..], 0, &mut args.output);
+                    } else {
+                        // breaking after this word, so that means "fresh" is true for the next iteration
+                        write_with_spaces(word, slen, &mut args.output);
+                        fresh = true;
+                    }
+                    break;
+                }
+                write_with_spaces(word, slen, &mut args.output);
+            }
+            (prev_punct, fresh)
+        },
+    );
+    let (mut prev_punct, mut fresh) = result;
+
+    // after the last linebreak, write out the rest of the final line.
+    for winfo in iter {
+        if fresh {
+            write_newline(args.indent_str, &mut args.output);
+        }
+        let (slen, word) = slice_if_fresh(
+            fresh,
+            winfo.word,
+            winfo.word_start,
+            args.uniform,
+            winfo.new_line,
+            winfo.sentence_start,
+            prev_punct,
+        );
+        prev_punct = winfo.ends_punct;
+        fresh = false;
+        write_with_spaces(word, slen, &mut args.output);
+    }
+
+    args.output.push('\n');
+}
+
+struct LineBreak<'a> {
+    prev: usize,
+    linebreak: Option<&'a WordInfo<'a>>,
+    break_before: bool,
+    demerits: i64,
+    prev_rat: f32,
+    length: usize,
+    fresh: bool,
+}
+
+#[allow(clippy::cognitive_complexity)]
+#[allow(clippy::cast_possible_wrap)]
+fn find_kp_breakpoints<'a, T: Iterator<Item = &'a WordInfo<'a>>>(
+    iter: T,
+    args: &BreakArgs<'a>,
+) -> Vec<(&'a WordInfo<'a>, bool)> {
+    let mut iter = iter.peekable();
+    // set up the initial null linebreak
+    let mut linebreaks = vec![LineBreak {
+        prev: 0,
+        linebreak: None,
+        break_before: false,
+        demerits: 0,
+        prev_rat: 0.0,
+        length: args.init_len,
+        fresh: false,
+    }];
+    // this vec holds the current active linebreaks; next_ holds the breaks that will be active for
+    // the next word
+    let mut active_breaks = vec![0];
+    let mut next_active_breaks = vec![];
+
+    let stretch = args.opts.width - args.opts.goal;
+    let minlength = args.opts.goal.max(stretch + 1) - stretch;
+    let mut new_linebreaks = vec![];
+    let mut is_sentence_start = false;
+    let mut least_demerits = 0;
+    loop {
+        let Some(w) = iter.next() else { break };
+
+        // if this is the last word, we don't add additional demerits for this break
+        let (is_last_word, is_sentence_end) = match iter.peek() {
+            None => (true, true),
+            Some(&&WordInfo {
+                sentence_start: st,
+                new_line: nl,
+                ..
+            }) => (false, st || (nl && w.ends_punct)),
+        };
+
+        // should we be adding extra space at the beginning of the next sentence?
+        let slen = compute_slen(args.uniform, w.new_line, is_sentence_start, false);
+
+        let mut ld_new = i64::MAX;
+        let mut ld_next = i64::MAX;
+        let mut ld_idx = 0;
+        new_linebreaks.clear();
+        next_active_breaks.clear();
+        // go through each active break, extending it and possibly adding a new active
+        // break if we are above the minimum required length
+        #[allow(clippy::explicit_iter_loop)]
+        for &i in active_breaks.iter() {
+            let active = &mut linebreaks[i];
+            // normalize demerits to avoid overflow, and record if this is the least
+            active.demerits -= least_demerits;
+            if active.demerits < ld_next {
+                ld_next = active.demerits;
+                ld_idx = i;
+            }
+
+            // get the new length
+            let tlen = w.word_nchars
+                + args.compute_width(w, active.length, active.fresh)
+                + slen
+                + active.length;
+
+            // if tlen is longer than args.opts.width, we drop this break from the active list
+            // otherwise, we extend the break, and possibly add a new break at this point
+            if tlen <= args.opts.width {
+                // this break will still be active next time
+                next_active_breaks.push(i);
+                // we can put this word on this line
+                active.fresh = false;
+                active.length = tlen;
+
+                // if we're above the minlength, we can also consider breaking here
+                if tlen >= minlength {
+                    let (new_demerits, new_ratio) = if is_last_word {
+                        // there is no penalty for the final line's length
+                        (0, 0.0)
+                    } else {
+                        compute_demerits(
+                            args.opts.goal as isize - tlen as isize,
+                            stretch,
+                            w.word_nchars,
+                            active.prev_rat,
+                        )
+                    };
+
+                    // do not even consider adding a line that has too many demerits
+                    // also, try to detect overflow by checking signum
+                    let total_demerits = new_demerits + active.demerits;
+                    if new_demerits < BAD_INFTY_SQ
+                        && total_demerits < ld_new
+                        && active.demerits.signum() <= new_demerits.signum()
+                    {
+                        ld_new = total_demerits;
+                        new_linebreaks.push(LineBreak {
+                            prev: i,
+                            linebreak: Some(w),
+                            break_before: false,
+                            demerits: total_demerits,
+                            prev_rat: new_ratio,
+                            length: args.indent_len,
+                            fresh: true,
+                        });
+                    }
+                }
+            }
+        }
+
+        // if we generated any new linebreaks, add the last one to the list
+        // the last one is always the best because we don't add to new_linebreaks unless
+        // it's better than the best one so far
+        match new_linebreaks.pop() {
+            None => (),
+            Some(lb) => {
+                next_active_breaks.push(linebreaks.len());
+                linebreaks.push(lb);
+            }
+        }
+
+        if next_active_breaks.is_empty() {
+            // every potential linebreak is too long! choose the linebreak with the least demerits, ld_idx
+            let new_break =
+                restart_active_breaks(args, &linebreaks[ld_idx], ld_idx, w, slen, minlength);
+            next_active_breaks.push(linebreaks.len());
+            linebreaks.push(new_break);
+            least_demerits = 0;
+        } else {
+            // next time around, normalize out the demerits fields
+            // on active linebreaks to make overflow less likely
+            least_demerits = cmp::max(ld_next, 0);
+        }
+        // swap in new list of active breaks
+        mem::swap(&mut active_breaks, &mut next_active_breaks);
+        // If this was the last word in a sentence, the next one must be the first in the next.
+        is_sentence_start = is_sentence_end;
+    }
+
+    // return the best path
+    build_best_path(&linebreaks, &active_breaks)
+}
+
+fn build_best_path<'a>(paths: &[LineBreak<'a>], active: &[usize]) -> Vec<(&'a WordInfo<'a>, bool)> {
+    // of the active paths, we select the one with the fewest demerits
+    active
+        .iter()
+        .min_by_key(|&&a| paths[a].demerits)
+        .map(|&(mut best_idx)| {
+            let mut breakwords = vec![];
+            // now, chase the pointers back through the break list, recording
+            // the words at which we should break
+            loop {
+                let next_best = &paths[best_idx];
+                match next_best.linebreak {
+                    None => return breakwords,
+                    Some(prev) => {
+                        breakwords.push((prev, next_best.break_before));
+                        best_idx = next_best.prev;
+                    }
+                }
+            }
+        })
+        .unwrap_or_default()
+}
+
+// "infinite" badness is more like (1+BAD_INFTY)^2 because of how demerits are computed
+const BAD_INFTY: i64 = 10_000_000;
+const BAD_INFTY_SQ: i64 = BAD_INFTY * BAD_INFTY;
+// badness = BAD_MULT * abs(r) ^ 3
+const BAD_MULT: f32 = 100.0;
+// DR_MULT is multiplier for delta-R between lines
+const DR_MULT: f32 = 600.0;
+// DL_MULT is penalty multiplier for short words at end of line
+const DL_MULT: f32 = 300.0;
+
+#[allow(clippy::cast_precision_loss)]
+#[allow(clippy::cast_possible_truncation)]
+fn compute_demerits(delta_len: isize, stretch: usize, wlen: usize, prev_rat: f32) -> (i64, f32) {
+    // how much stretch are we using?
+    let ratio = if delta_len == 0 {
+        0.0f32
+    } else {
+        delta_len as f32 / stretch as f32
+    };
+
+    // compute badness given the stretch ratio
+    let bad_linelen = if ratio.abs() > 1.0f32 {
+        BAD_INFTY
+    } else {
+        (BAD_MULT * ratio.powi(3).abs()) as i64
+    };
+
+    // we penalize lines ending in really short words
+    let bad_wordlen = if wlen >= stretch {
+        0
+    } else {
+        (DL_MULT
+            * ((stretch - wlen) as f32 / (stretch - 1) as f32)
+                .powi(3)
+                .abs()) as i64
+    };
+
+    // we penalize lines that have very different ratios from previous lines
+    let bad_delta_r = (DR_MULT * ((ratio - prev_rat) / 2.0).powi(3).abs()) as i64;
+
+    let demerits = i64::pow(1 + bad_linelen + bad_wordlen + bad_delta_r, 2);
+
+    (demerits, ratio)
+}
+
+#[allow(clippy::cast_possible_wrap)]
+fn restart_active_breaks<'a>(
+    args: &BreakArgs<'a>,
+    active: &LineBreak<'a>,
+    act_idx: usize,
+    w: &'a WordInfo<'a>,
+    slen: usize,
+    min: usize,
+) -> LineBreak<'a> {
+    let (break_before, line_length) = if active.fresh {
+        // never break before a word if that word would be the first on a line
+        (false, args.indent_len)
+    } else {
+        // choose the lesser evil: breaking too early, or breaking too late
+        let wlen = w.word_nchars + args.compute_width(w, active.length, active.fresh);
+        let underlen = min as isize - active.length as isize;
+        let overlen = (wlen + slen + active.length) as isize - args.opts.width as isize;
+        if overlen > underlen {
+            // break early, put this word on the next line
+            (true, args.indent_len + w.word_nchars)
+        } else {
+            (false, args.indent_len)
+        }
+    };
+
+    // restart the linebreak. This will be our only active path.
+    LineBreak {
+        prev: act_idx,
+        linebreak: Some(w),
+        break_before,
+        demerits: 0, // this is the only active break, so we can reset the demerit count
+        prev_rat: if break_before { 1.0 } else { -1.0 },
+        length: line_length,
+        fresh: !break_before,
+    }
+}
+
+// Number of spaces to add before a word, based on mode, newline, sentence start.
+#[allow(clippy::fn_params_excessive_bools)]
+fn compute_slen(uniform: bool, newline: bool, start: bool, punct: bool) -> usize {
+    if uniform || newline {
+        if start || (newline && punct) { 2 } else { 1 }
+    } else {
+        0
+    }
+}
+
+// If we're on a fresh line, slen=0 and we slice off leading whitespace.
+// Otherwise, compute slen and leave whitespace alone.
+#[allow(clippy::fn_params_excessive_bools)]
+fn slice_if_fresh(
+    fresh: bool,
+    word: &str,
+    start: usize,
+    uniform: bool,
+    newline: bool,
+    second_start: bool,
+    punct: bool,
+) -> (usize, &str) {
+    if fresh {
+        (0, &word[start..])
+    } else {
+        (compute_slen(uniform, newline, second_start, punct), word)
+    }
+}
+
+// Write a newline and add the indent.
+fn write_newline(indent: &str, output: &mut String) {
+    output.push('\n');
+    output.push_str(indent);
+}
+
+// Write the word, along with slen spaces.
+fn write_with_spaces(word: &str, slen: usize, output: &mut String) {
+    if slen == 2 {
+        output.push_str("  ");
+    } else if slen == 1 {
+        output.push(' ');
+    }
+    output.push_str(word);
+}
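To make the demerit arithmetic concrete: with `width = 75` and `goal = 70`, `stretch` is 5, and a non-final line ending 3 columns short of the goal picks up a length badness of `100 * (3/5)^3 ≈ 21` before the squaring step. Below is a standalone sketch of just that term, illustrative only, mirroring the constants and the line-length branch of `compute_demerits` above.

// Illustrative re-statement of the line-length badness used in compute_demerits.
fn line_badness(delta_len: f32, stretch: f32) -> i64 {
    const BAD_INFTY: i64 = 10_000_000;
    const BAD_MULT: f32 = 100.0;
    let ratio = if delta_len == 0.0 { 0.0 } else { delta_len / stretch };
    if ratio.abs() > 1.0 {
        BAD_INFTY
    } else {
        (BAD_MULT * ratio.powi(3).abs()) as i64
    }
}

fn main() {
    // width 75, goal 70 => stretch 5; the line falls 3 columns short of the goal.
    assert_eq!(line_badness(3.0, 5.0), 21); // 100 * 0.216, truncated
}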
diff --git a/crates/fmt/src/parasplit.rs b/crates/fmt/src/parasplit.rs
new file mode 100644
index 0000000..d4723cb
--- /dev/null
+++ b/crates/fmt/src/parasplit.rs
@@ -0,0 +1,629 @@
+// yt - A fully featured command line YouTube client
+//
+// Copyright (C) 2025 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+// Copyright (C) 2025 uutils developers
+// SPDX-License-Identifier: MIT
+//
+// This file is part of Yt.
+//
+// You should have received a copy of the License along with this program.
+// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
+
+// This file is part of the uutils coreutils package.
+//
+// For the full copyright and license information, please view the LICENSE
+// file that was distributed with this source code.
+
+use std::iter::Peekable;
+use std::slice::Iter;
+use unicode_width::UnicodeWidthChar;
+
+use crate::FmtOptions;
+
+fn char_width(c: char) -> usize {
+    if (c as usize) < 0xA0 {
+        // if it is ASCII, call it exactly 1 wide (including control chars)
+        // calling control chars' widths 1 is consistent with OpenBSD fmt
+        1
+    } else {
+        // otherwise, get the unicode width
+        // note that we shouldn't actually get None here because only c < 0xA0
+        // can return None, but for safety and future-proofing we do it this way
+        UnicodeWidthChar::width(c).unwrap_or(1)
+    }
+}
+
+// lines with PSKIP, lacking PREFIX, or which are entirely blank are
+// NoFormatLines; otherwise, they are FormatLines
+#[derive(Debug)]
+pub(super) enum Line {
+    FormatLine(FileLine),
+    NoFormatLine(String, bool),
+}
+
+impl Line {
+    // when we know that it's a FormatLine, as in the ParagraphStream iterator
+    fn get_formatline(self) -> FileLine {
+        match self {
+            Self::FormatLine(fl) => fl,
+            Self::NoFormatLine(..) => panic!("Found NoFormatLine when expecting FormatLine"),
+        }
+    }
+
+    // when we know that it's a NoFormatLine, as in the ParagraphStream iterator
+    fn get_noformatline(self) -> (String, bool) {
+        match self {
+            Self::NoFormatLine(s, b) => (s, b),
+            Self::FormatLine(..) => panic!("Found FormatLine when expecting NoFormatLine"),
+        }
+    }
+}
+
+/// Each line's prefix has to be considered to know whether to merge it with
+/// the next line or not
+#[derive(Debug)]
+pub(super) struct FileLine {
+    line: String,
+    /// The end of the indent, always the start of the text
+    indent_end: usize,
+
+    /// The end of the PREFIX's indent, that is, the spaces before the prefix
+    prefix_indent_end: usize,
+
+    /// Display length of indent taking into account tabs
+    indent_len: usize,
+
+    /// PREFIX indent length taking into account tabs
+    prefix_len: usize,
+}
+
+/// Iterator that produces a stream of Lines from a file
+pub(super) struct FileLines<'a> {
+    opts: &'a FmtOptions,
+    lines: std::str::Lines<'a>,
+}
+
+impl FileLines<'_> {
+    fn new<'b>(opts: &'b FmtOptions, lines: std::str::Lines<'b>) -> FileLines<'b> {
+        FileLines { opts, lines }
+    }
+
+    /// returns true if this line should be formatted
+    fn match_prefix(&self, line: &str) -> (bool, usize) {
+        let Some(prefix) = &self.opts.prefix else {
+            return (true, 0);
+        };
+
+        FileLines::match_prefix_generic(prefix, line, self.opts.exact_prefix)
+    }
+
+    /// returns true if this line should be formatted
+    fn match_anti_prefix(&self, line: &str) -> bool {
+        let Some(anti_prefix) = &self.opts.anti_prefix else {
+            return true;
+        };
+
+        match FileLines::match_prefix_generic(anti_prefix, line, self.opts.exact_anti_prefix) {
+            (true, _) => false,
+            (_, _) => true,
+        }
+    }
+
+    fn match_prefix_generic(pfx: &str, line: &str, exact: bool) -> (bool, usize) {
+        if line.starts_with(pfx) {
+            return (true, 0);
+        }
+
+        if !exact {
+            // we do it this way rather than byte indexing to support unicode whitespace chars
+            for (i, char) in line.char_indices() {
+                if line[i..].starts_with(pfx) {
+                    return (true, i);
+                } else if !char.is_whitespace() {
+                    break;
+                }
+            }
+        }
+
+        (false, 0)
+    }
+
+    fn compute_indent(&self, string: &str, prefix_end: usize) -> (usize, usize, usize) {
+        let mut prefix_len = 0;
+        let mut indent_len = 0;
+        let mut indent_end = 0;
+        for (os, c) in string.char_indices() {
+            if os == prefix_end {
+                // we found the end of the prefix, so this is the printed length of the prefix here
+                prefix_len = indent_len;
+            }
+
+            if (os >= prefix_end) && !c.is_whitespace() {
+                // found first non-whitespace after prefix, this is indent_end
+                indent_end = os;
+                break;
+            } else if c == '\t' {
+                // compute tab length
+                indent_len = (indent_len / self.opts.tabwidth + 1) * self.opts.tabwidth;
+            } else {
+                // non-tab character
+                indent_len += char_width(c);
+            }
+        }
+        (indent_end, prefix_len, indent_len)
+    }
+}
+
+impl Iterator for FileLines<'_> {
+    type Item = Line;
+
+    fn next(&mut self) -> Option<Line> {
+        let n = self.lines.next()?;
+
+        // if this line is entirely whitespace,
+        // emit a blank line
+        // Err(true) indicates that this was a linebreak,
+        // which is important to know when detecting mail headers
+        if n.chars().all(char::is_whitespace) {
+            return Some(Line::NoFormatLine(String::new(), true));
+        }
+
+        let (pmatch, poffset) = self.match_prefix(n);
+
+        // if this line does not match the prefix,
+        // emit the line unprocessed and iterate again
+        if !pmatch {
+            return Some(Line::NoFormatLine(n.to_owned(), false));
+        }
+
+        // if the line matches the prefix, but is blank after,
+        // don't allow lines to be combined through it (that is,
+        // treat it like a blank line, except that since it's
+        // not truly blank we will not allow mail headers on the
+        // following line)
+        if pmatch
+            && n[poffset + self.opts.prefix.as_ref().map_or(0, String::len)..]
+                .chars()
+                .all(char::is_whitespace)
+        {
+            return Some(Line::NoFormatLine(n.to_owned(), false));
+        }
+
+        // skip if this line matches the anti_prefix
+        // (NOTE definition of match_anti_prefix is TRUE if we should process)
+        if !self.match_anti_prefix(n) {
+            return Some(Line::NoFormatLine(n.to_owned(), false));
+        }
+
+        // figure out the indent, prefix, and prefixindent ending points
+        let prefix_end = poffset + self.opts.prefix.as_ref().map_or(0, String::len);
+        let (indent_end, prefix_len, indent_len) = self.compute_indent(n, prefix_end);
+
+        Some(Line::FormatLine(FileLine {
+            line: n.to_owned(),
+            indent_end,
+            prefix_indent_end: poffset,
+            indent_len,
+            prefix_len,
+        }))
+    }
+}
+
+/// A paragraph : a collection of [`FileLines`] that are to be formatted
+/// plus info about the paragraph's indentation
+///
+/// We only retain the String from the [`FileLine`]; the other info
+/// is only there to help us in deciding how to merge lines into Paragraphs
+#[derive(Debug)]
+pub(super) struct Paragraph {
+    /// the lines of the file
+    lines: Vec<String>,
+    /// string representing the init, that is, the first line's indent
+    pub init_str: String,
+    /// printable length of the init string considering TABWIDTH
+    pub init_len: usize,
+    /// byte location of end of init in first line String
+    init_end: usize,
+    /// string representing indent
+    pub indent_str: String,
+    /// length of above
+    pub indent_len: usize,
+    /// byte location of end of indent (in crown and tagged mode, only applies to 2nd line and onward)
+    indent_end: usize,
+    /// we need to know if this is a mail header because we do word splitting differently in that case
+    pub mail_header: bool,
+}
+
+/// An iterator producing a stream of paragraphs from a stream of lines
+/// given a set of options.
+pub(super) struct ParagraphStream<'a> {
+    lines: Peekable<FileLines<'a>>,
+    next_mail: bool,
+    opts: &'a FmtOptions,
+}
+
+impl ParagraphStream<'_> {
+    pub(super) fn new<'b>(opts: &'b FmtOptions, text: &'b str) -> ParagraphStream<'b> {
+        let lines = FileLines::new(opts, text.lines()).peekable();
+        // at the beginning of the file, we might find mail headers
+        ParagraphStream {
+            lines,
+            next_mail: true,
+            opts,
+        }
+    }
+
+    /// Detect RFC822 mail header
+    fn is_mail_header(line: &FileLine) -> bool {
+        // a mail header begins with either "From " (envelope sender line)
+        // or with a sequence of printable ASCII chars (33 to 126, inclusive,
+        // except colon) followed by a colon.
+        if line.indent_end > 0 {
+            false
+        } else {
+            let l_slice = &line.line[..];
+            if l_slice.starts_with("From ") {
+                true
+            } else {
+                let Some(colon_posn) = l_slice.find(':') else {
+                    return false;
+                };
+
+                // header field must be nonzero length
+                if colon_posn == 0 {
+                    return false;
+                }
+
+                l_slice[..colon_posn]
+                    .chars()
+                    .all(|x| !matches!(x as usize, y if !(33..=126).contains(&y)))
+            }
+        }
+    }
+}
+
+impl Iterator for ParagraphStream<'_> {
+    type Item = Result<Paragraph, String>;
+
+    #[allow(clippy::cognitive_complexity)]
+    fn next(&mut self) -> Option<Result<Paragraph, String>> {
+        // return a NoFormatLine in an Err; it should immediately be output
+        let noformat = match self.lines.peek()? {
+            Line::FormatLine(_) => false,
+            Line::NoFormatLine(_, _) => true,
+        };
+
+        // found a NoFormatLine, immediately dump it out
+        if noformat {
+            let (s, nm) = self.lines.next().unwrap().get_noformatline();
+            self.next_mail = nm;
+            return Some(Err(s));
+        }
+
+        // found a FormatLine, now build a paragraph
+        let mut init_str = String::new();
+        let mut init_end = 0;
+        let mut init_len = 0;
+        let mut indent_str = String::new();
+        let mut indent_end = 0;
+        let mut indent_len = 0;
+        let mut prefix_len = 0;
+        let mut prefix_indent_end = 0;
+        let mut p_lines = Vec::new();
+
+        let mut in_mail = false;
+        let mut second_done = false; // for when we use crown or tagged mode
+        loop {
+            // peek ahead
+            // need to explicitly force fl out of scope before we can call self.lines.next()
+            let Some(Line::FormatLine(fl)) = self.lines.peek() else {
+                break;
+            };
+
+            if p_lines.is_empty() {
+                // first time through the loop, get things set up
+                // detect mail header
+                if self.opts.mail && self.next_mail && ParagraphStream::is_mail_header(fl) {
+                    in_mail = true;
+                    // there can't be any indent or prefixindent because otherwise is_mail_header
+                    // would fail since there cannot be any whitespace before the colon in a
+                    // valid header field
+                    indent_str.push_str("  ");
+                    indent_len = 2;
+                } else {
+                    if self.opts.crown_margin || self.opts.tagged_paragraph {
+                        init_str.push_str(&fl.line[..fl.indent_end]);
+                        init_len = fl.indent_len;
+                        init_end = fl.indent_end;
+                    } else {
+                        second_done = true;
+                    }
+
+                    // these will be overwritten in the 2nd line of crown or tagged mode, but
+                    // we are not guaranteed to get to the 2nd line, e.g., if the next line
+                    // is a NoFormatLine or None. Thus, we set sane defaults the 1st time around
+                    indent_str.push_str(&fl.line[..fl.indent_end]);
+                    indent_len = fl.indent_len;
+                    indent_end = fl.indent_end;
+
+                    // save these to check for matching lines
+                    prefix_len = fl.prefix_len;
+                    prefix_indent_end = fl.prefix_indent_end;
+
+                    // in tagged mode, add 4 spaces of additional indenting by default
+                    // (gnu fmt's behavior is different: it seems to find the closest column to
+                    // indent_end that is divisible by 3. But honestly that behavior seems
+                    // pretty arbitrary.)
+                    // Perhaps a better default would be 1 TABWIDTH? But ugh that's so big.
+                    if self.opts.tagged_paragraph {
+                        indent_str.push_str("    ");
+                        indent_len += 4;
+                    }
+                }
+            } else if in_mail {
+                // lines following mail headers must begin with spaces
+                if fl.indent_end == 0 || (self.opts.prefix.is_some() && fl.prefix_indent_end == 0) {
+                    break; // this line does not begin with spaces
+                }
+            } else if !second_done {
+                // now we have enough info to handle crown margin and tagged mode
+
+                // in both crown and tagged modes we require that prefix_len is the same
+                if prefix_len != fl.prefix_len || prefix_indent_end != fl.prefix_indent_end {
+                    break;
+                }
+
+                // in tagged mode, indent has to be *different* on following lines
+                if self.opts.tagged_paragraph
+                    && indent_len - 4 == fl.indent_len
+                    && indent_end == fl.indent_end
+                {
+                    break;
+                }
+
+                // this is part of the same paragraph, get the indent info from this line
+                indent_str.clear();
+                indent_str.push_str(&fl.line[..fl.indent_end]);
+                indent_len = fl.indent_len;
+                indent_end = fl.indent_end;
+
+                second_done = true;
+            } else {
+                // detect mismatch
+                if indent_end != fl.indent_end
+                    || prefix_indent_end != fl.prefix_indent_end
+                    || indent_len != fl.indent_len
+                    || prefix_len != fl.prefix_len
+                {
+                    break;
+                }
+            }
+
+            p_lines.push(self.lines.next().unwrap().get_formatline().line);
+
+            // when we're in split-only mode, we never join lines, so stop here
+            if self.opts.split_only {
+                break;
+            }
+        }
+
+        // if this was a mail header, then the next line can be detected as one. Otherwise, it cannot.
+        // NOTE next_mail is true at ParagraphStream instantiation, and is set to true after a blank
+        // NoFormatLine.
+        self.next_mail = in_mail;
+
+        Some(Ok(Paragraph {
+            lines: p_lines,
+            init_str,
+            init_len,
+            init_end,
+            indent_str,
+            indent_len,
+            indent_end,
+            mail_header: in_mail,
+        }))
+    }
+}
+
+pub(super) struct ParaWords<'a> {
+    opts: &'a FmtOptions,
+    para: &'a Paragraph,
+    words: Vec<WordInfo<'a>>,
+}
+
+impl<'a> ParaWords<'a> {
+    pub(super) fn new(opts: &'a FmtOptions, para: &'a Paragraph) -> Self {
+        let mut pw = ParaWords {
+            opts,
+            para,
+            words: Vec::new(),
+        };
+        pw.create_words();
+        pw
+    }
+
+    fn create_words(&mut self) {
+        if self.para.mail_header {
+            // no extra spacing for mail headers; always exactly 1 space
+            // safe to trim_start on every line of a mail header, since the
+            // first line is guaranteed not to have any spaces
+            self.words.extend(
+                self.para
+                    .lines
+                    .iter()
+                    .flat_map(|x| x.split_whitespace())
+                    .map(|x| WordInfo {
+                        word: x,
+                        word_start: 0,
+                        word_nchars: x.len(), // OK for mail headers; only ASCII allowed (unicode is escaped)
+                        before_tab: None,
+                        after_tab: 0,
+                        sentence_start: false,
+                        ends_punct: false,
+                        new_line: false,
+                    }),
+            );
+        } else {
+            // first line
+            self.words
+                .extend(if self.opts.crown_margin || self.opts.tagged_paragraph {
+                    // crown and tagged mode has the "init" in the first line, so slice from there
+                    WordSplit::new(self.opts, &self.para.lines[0][self.para.init_end..])
+                } else {
+                    // otherwise we slice from the indent
+                    WordSplit::new(self.opts, &self.para.lines[0][self.para.indent_end..])
+                });
+
+            if self.para.lines.len() > 1 {
+                let indent_end = self.para.indent_end;
+                let opts = self.opts;
+                self.words.extend(
+                    self.para
+                        .lines
+                        .iter()
+                        .skip(1)
+                        .flat_map(|x| WordSplit::new(opts, &x[indent_end..])),
+                );
+            }
+        }
+    }
+
+    pub(super) fn words(&'a self) -> Iter<'a, WordInfo<'a>> {
+        self.words.iter()
+    }
+}
+
+struct WordSplit<'a> {
+    opts: &'a FmtOptions,
+    string: &'a str,
+    length: usize,
+    position: usize,
+    prev_punct: bool,
+}
+
+impl WordSplit<'_> {
+    fn analyze_tabs(&self, string: &str) -> (Option<usize>, usize, Option<usize>) {
+        // given a string, determine (length before tab) and (printed length after first tab)
+        // if there are no tabs, beforetab = None and aftertab is the printed length
+        let mut beforetab = None;
+        let mut aftertab = 0;
+        let mut word_start = None;
+        for (os, c) in string.char_indices() {
+            if !c.is_whitespace() {
+                word_start = Some(os);
+                break;
+            } else if c == '\t' {
+                if beforetab.is_none() {
+                    beforetab = Some(aftertab);
+                    aftertab = 0;
+                } else {
+                    aftertab = (aftertab / self.opts.tabwidth + 1) * self.opts.tabwidth;
+                }
+            } else {
+                aftertab += 1;
+            }
+        }
+        (beforetab, aftertab, word_start)
+    }
+}
+
+impl WordSplit<'_> {
+    fn new<'b>(opts: &'b FmtOptions, string: &'b str) -> WordSplit<'b> {
+        // wordsplits *must* start at a non-whitespace character
+        let trim_string = string.trim_start();
+        WordSplit {
+            opts,
+            string: trim_string,
+            length: string.len(),
+            position: 0,
+            prev_punct: false,
+        }
+    }
+
+    fn is_punctuation(c: char) -> bool {
+        matches!(c, '!' | '.' | '?')
+    }
+}
+
+pub(super) struct WordInfo<'a> {
+    pub word: &'a str,
+    pub word_start: usize,
+    pub word_nchars: usize,
+    pub before_tab: Option<usize>,
+    pub after_tab: usize,
+    pub sentence_start: bool,
+    pub ends_punct: bool,
+    pub new_line: bool,
+}
+
+// returns (&str, is_start_of_sentence)
+impl<'a> Iterator for WordSplit<'a> {
+    type Item = WordInfo<'a>;
+
+    fn next(&mut self) -> Option<WordInfo<'a>> {
+        if self.position >= self.length {
+            return None;
+        }
+
+        let old_position = self.position;
+        let new_line = old_position == 0;
+
+        // find the start of the next word, and record if we find a tab character
+        let (before_tab, after_tab, word_start) =
+            if let (b, a, Some(s)) = self.analyze_tabs(&self.string[old_position..]) {
+                (b, a, s + old_position)
+            } else {
+                self.position = self.length;
+                return None;
+            };
+
+        // find the beginning of the next whitespace
+        // note that this preserves the invariant that self.position
+        // points to whitespace character OR end of string
+        let mut word_nchars = 0;
+        self.position = match self.string[word_start..].find(|x: char| {
+            if x.is_whitespace() {
+                true
+            } else {
+                word_nchars += char_width(x);
+                false
+            }
+        }) {
+            None => self.length,
+            Some(s) => s + word_start,
+        };
+
+        let word_start_relative = word_start - old_position;
+        // if the previous word ended in punctuation and this word is preceded by a tab or more than one space, it starts a new sentence.
+        let is_start_of_sentence =
+            self.prev_punct && (before_tab.is_some() || word_start_relative > 1);
+
+        // now record whether this word ends in punctuation
+        self.prev_punct = match self.string[..self.position].chars().next_back() {
+            Some(ch) => WordSplit::is_punctuation(ch),
+            _ => panic!("fatal: expected word not to be empty"),
+        };
+
+        let (word, word_start_relative, before_tab, after_tab) = if self.opts.uniform {
+            (&self.string[word_start..self.position], 0, None, 0)
+        } else {
+            (
+                &self.string[old_position..self.position],
+                word_start_relative,
+                before_tab,
+                after_tab,
+            )
+        };
+
+        Some(WordInfo {
+            word,
+            word_start: word_start_relative,
+            word_nchars,
+            before_tab,
+            after_tab,
+            sentence_start: is_start_of_sentence,
+            ends_punct: self.prev_punct,
+            new_line,
+        })
+    }
+}
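Both `compute_indent` and `analyze_tabs` account for tabs by rounding the current printed column up to the next multiple of `tabwidth`. A minimal standalone sketch of that single expression, for illustration only:

// A tab advances the printed column to the next tab stop,
// i.e. the next multiple of tabwidth strictly greater than the current column.
fn next_tab_stop(col: usize, tabwidth: usize) -> usize {
    (col / tabwidth + 1) * tabwidth
}

fn main() {
    assert_eq!(next_tab_stop(0, 8), 8);
    assert_eq!(next_tab_stop(5, 8), 8);
    assert_eq!(next_tab_stop(8, 8), 16); // a tab at a tab stop still advances a full stop
}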
diff --git a/crates/libmpv2/CHANGELOG.md b/crates/libmpv2/CHANGELOG.md
index dc6f861..a3d14d7 100644
--- a/crates/libmpv2/CHANGELOG.md
+++ b/crates/libmpv2/CHANGELOG.md
@@ -16,7 +16,7 @@ If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
 
 ## Version 3.0.0
 
-- \[breaking\] Support libmpv version 2.0 (mpv version 0.35.0). Mpv versions \<=
+- [breaking] Support libmpv version 2.0 (mpv version 0.35.0). Mpv versions \<=
   0.34.0 will no longer be supported.
 - Add OpenGL rendering
 
@@ -29,10 +29,10 @@ If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
 ## Version 2.0.0
 
 - Add method `Mpv::with_initializer` to set options before initialization
-- \[breaking\] Borrow `&mut self` in `wait_event` to disallow using two events
+- [breaking] Borrow `&mut self` in `wait_event` to disallow using two events
   where the first points to data freed in the second `wait_event` call
-- \[breaking\] `PropertyData<'_>` is no longer `Clone` or `PartialEq`,
-  `Event<'_>` is no longer `Clone` to avoid cloning/comparing `MpvNode`
+- [breaking] `PropertyData<'_>` is no longer `Clone` or `PartialEq`, `Event<'_>`
+  is no longer `Clone` to avoid cloning/comparing `MpvNode`
 
 ## Version 1.1.0
 
diff --git a/crates/libmpv2/Cargo.toml b/crates/libmpv2/Cargo.toml
index a8a4ed6..fb2f5bf 100644
--- a/crates/libmpv2/Cargo.toml
+++ b/crates/libmpv2/Cargo.toml
@@ -24,7 +24,6 @@ publish = false
 
 [dependencies]
 libmpv2-sys = { path = "libmpv2-sys" }
-thiserror = "2.0.7"
 log.workspace = true
 
 [dev-dependencies]
diff --git a/crates/libmpv2/examples/events.rs b/crates/libmpv2/examples/events.rs
index 8f7c79f..e502d5c 100644
--- a/crates/libmpv2/examples/events.rs
+++ b/crates/libmpv2/examples/events.rs
@@ -45,25 +45,27 @@ fn main() -> Result<()> {
             // Trigger `Event::EndFile`.
             mpv.command("playlist-next", &["force"]).unwrap();
         });
-        scope.spawn(move |_| loop {
-            let ev = ev_ctx.wait_event(600.).unwrap_or(Err(Error::Null));
+        scope.spawn(move |_| {
+            loop {
+                let ev = ev_ctx.wait_event(600.).unwrap_or(Err(Error::Null));
 
-            match ev {
-                Ok(Event::EndFile(r)) => {
-                    println!("Exiting! Reason: {:?}", r);
-                    break;
-                }
+                match ev {
+                    Ok(Event::EndFile(r)) => {
+                        println!("Exiting! Reason: {:?}", r);
+                        break;
+                    }
 
-                Ok(Event::PropertyChange {
-                    name: "demuxer-cache-state",
-                    change: PropertyData::Node(mpv_node),
-                    ..
-                }) => {
-                    let ranges = seekable_ranges(mpv_node);
-                    println!("Seekable ranges updated: {:?}", ranges);
+                    Ok(Event::PropertyChange {
+                        name: "demuxer-cache-state",
+                        change: PropertyData::Node(mpv_node),
+                        ..
+                    }) => {
+                        let ranges = seekable_ranges(mpv_node);
+                        println!("Seekable ranges updated: {:?}", ranges);
+                    }
+                    Ok(e) => println!("Event triggered: {:?}", e),
+                    Err(e) => println!("Event errored: {:?}", e),
                 }
-                Ok(e) => println!("Event triggered: {:?}", e),
-                Err(e) => println!("Event errored: {:?}", e),
             }
         });
     })
diff --git a/crates/libmpv2/examples/opengl.rs b/crates/libmpv2/examples/opengl.rs
index 1de307f..9f595aa 100644
--- a/crates/libmpv2/examples/opengl.rs
+++ b/crates/libmpv2/examples/opengl.rs
@@ -9,8 +9,8 @@
 // If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
 
 use libmpv2::{
-    render::{OpenGLInitParams, RenderContext, RenderParam, RenderParamApiType},
     Mpv,
+    render::{OpenGLInitParams, RenderContext, RenderParam, RenderParamApiType},
 };
 use std::{env, ffi::c_void};
 
diff --git a/crates/libmpv2/libmpv2-sys/Cargo.toml b/crates/libmpv2/libmpv2-sys/Cargo.toml
index b0514b8..96141d3 100644
--- a/crates/libmpv2/libmpv2-sys/Cargo.toml
+++ b/crates/libmpv2/libmpv2-sys/Cargo.toml
@@ -23,4 +23,4 @@ rust-version.workspace = true
 publish = false
 
 [build-dependencies]
-bindgen = { version = "0.71.1" }
+bindgen = { version = "0.72.0" }
diff --git a/crates/libmpv2/libmpv2-sys/build.rs b/crates/libmpv2/libmpv2-sys/build.rs
index bf9a02e..45c2450 100644
--- a/crates/libmpv2/libmpv2-sys/build.rs
+++ b/crates/libmpv2/libmpv2-sys/build.rs
@@ -30,7 +30,9 @@ fn main() {
             ),
             "--verbose",
         ])
-        .generate_comments(true)
+        // NOTE(@bpeetz): The comments are interpreted as doc-tests,
+        // which obviously fail, as the code is C. <2025-06-16>
+        .generate_comments(false)
         .generate()
         .expect("Unable to generate bindings");
 
diff --git a/crates/libmpv2/src/lib.rs b/crates/libmpv2/src/lib.rs
index 4d8d18a..f6c2103 100644
--- a/crates/libmpv2/src/lib.rs
+++ b/crates/libmpv2/src/lib.rs
@@ -35,7 +35,7 @@ use std::os::raw as ctype;
 pub const MPV_CLIENT_API_MAJOR: ctype::c_ulong = 2;
 pub const MPV_CLIENT_API_MINOR: ctype::c_ulong = 2;
 pub const MPV_CLIENT_API_VERSION: ctype::c_ulong =
-    MPV_CLIENT_API_MAJOR << 16 | MPV_CLIENT_API_MINOR;
+    (MPV_CLIENT_API_MAJOR << 16) | MPV_CLIENT_API_MINOR;
 
 mod mpv;
 #[cfg(test)]
@@ -67,8 +67,8 @@ pub mod mpv_error {
     pub use libmpv2_sys::mpv_error_MPV_ERROR_INVALID_PARAMETER as InvalidParameter;
     pub use libmpv2_sys::mpv_error_MPV_ERROR_LOADING_FAILED as LoadingFailed;
     pub use libmpv2_sys::mpv_error_MPV_ERROR_NOMEM as NoMem;
-    pub use libmpv2_sys::mpv_error_MPV_ERROR_NOTHING_TO_PLAY as NothingToPlay;
     pub use libmpv2_sys::mpv_error_MPV_ERROR_NOT_IMPLEMENTED as NotImplemented;
+    pub use libmpv2_sys::mpv_error_MPV_ERROR_NOTHING_TO_PLAY as NothingToPlay;
     pub use libmpv2_sys::mpv_error_MPV_ERROR_OPTION_ERROR as OptionError;
     pub use libmpv2_sys::mpv_error_MPV_ERROR_OPTION_FORMAT as OptionFormat;
     pub use libmpv2_sys::mpv_error_MPV_ERROR_OPTION_NOT_FOUND as OptionNotFound;
diff --git a/crates/libmpv2/src/mpv.rs b/crates/libmpv2/src/mpv.rs
index 07d0976..29dac8d 100644
--- a/crates/libmpv2/src/mpv.rs
+++ b/crates/libmpv2/src/mpv.rs
@@ -184,7 +184,7 @@ pub mod mpv_node {
 
     pub mod sys_node {
         use super::{DropWrapper, MpvNode, MpvNodeArrayIter, MpvNodeMapIter};
-        use crate::{mpv_error, mpv_format, Error, Result};
+        use crate::{Error, Result, mpv_error, mpv_format};
         use std::rc::Rc;
 
         #[derive(Debug, Clone)]
@@ -375,14 +375,14 @@ unsafe impl SetData for String {
 /// Wrapper around an `&str` returned by mpv, that properly deallocates it with mpv's allocator.
 #[derive(Debug, Hash, Eq, PartialEq)]
 pub struct MpvStr<'a>(&'a str);
-impl<'a> Deref for MpvStr<'a> {
+impl Deref for MpvStr<'_> {
     type Target = str;
 
     fn deref(&self) -> &str {
         self.0
     }
 }
-impl<'a> Drop for MpvStr<'a> {
+impl Drop for MpvStr<'_> {
     fn drop(&mut self) {
         unsafe { libmpv2_sys::mpv_free(self.0.as_ptr() as *mut u8 as _) };
     }
@@ -403,7 +403,7 @@ unsafe impl<'a> GetData for MpvStr<'a> {
     }
 }
 
-unsafe impl<'a> SetData for &'a str {
+unsafe impl SetData for &str {
     fn call_as_c_void<T, F: FnMut(*mut ctype::c_void) -> Result<T>>(self, mut fun: F) -> Result<T> {
         let string = CString::new(self)?;
         fun((&mut string.as_ptr()) as *mut *const ctype::c_char as *mut _)
@@ -511,9 +511,8 @@ impl Mpv {
         }
 
         initializer(MpvInitializer { ctx })?;
-        mpv_err((), unsafe { libmpv2_sys::mpv_initialize(ctx) }).map_err(|err| {
+        mpv_err((), unsafe { libmpv2_sys::mpv_initialize(ctx) }).inspect_err(|_| {
             unsafe { libmpv2_sys::mpv_terminate_destroy(ctx) };
-            err
         })?;
 
         let ctx = unsafe { NonNull::new_unchecked(ctx) };
@@ -526,19 +525,6 @@ impl Mpv {
         })
     }
 
-    /// Execute a command
-    pub fn execute(&self, name: &str, args: &[&str]) -> Result<()> {
-        if args.is_empty() {
-            debug!("Running mpv command: '{}'", name);
-        } else {
-            debug!("Running mpv command: '{} {}'", name, args.join(" "));
-        }
-
-        self.command(name, args)?;
-
-        Ok(())
-    }
-
     /// Load a configuration file. The path has to be absolute, and a file.
     pub fn load_config(&self, path: &str) -> Result<()> {
         let file = CString::new(path)?.into_raw();
@@ -562,7 +548,7 @@ impl Mpv {
     /// Send a command to the `Mpv` instance. This uses `mpv_command_string` internally,
     /// so that the syntax is the same as described in the [manual for the input.conf](https://mpv.io/manual/master/#list-of-input-commands).
     ///
-    /// Note that you may have to escape strings with `""` when they contain spaces.
+    /// Note that this function escapes the arguments for you.
     ///
     /// # Examples
     ///
@@ -583,12 +569,19 @@ impl Mpv {
     /// # }
     /// ```
     pub fn command(&self, name: &str, args: &[&str]) -> Result<()> {
-        let mut cmd = name.to_owned();
+        fn escape(input: &str) -> String {
+            input.replace('"', "\\\"")
+        }
+
+        let mut cmd = escape(name);
 
         for elem in args {
             cmd.push(' ');
-            cmd.push_str(elem);
+            cmd.push('"');
+            cmd.push_str(&escape(elem));
+            cmd.push('"');
         }
+        debug!("Running mpv command: '{}'", cmd);
 
         let raw = CString::new(cmd)?;
         mpv_err((), unsafe {
@@ -597,7 +590,9 @@ impl Mpv {
     }
 
     /// Set the value of a property.
-    pub fn set_property<T: SetData>(&self, name: &str, data: T) -> Result<()> {
+    pub fn set_property<T: SetData + std::fmt::Display>(&self, name: &str, data: T) -> Result<()> {
+        debug!("Setting mpv property: '{name}' = '{data}'");
+
         let name = CString::new(name)?;
         let format = T::get_format().as_mpv_format() as _;
         data.call_as_c_void(|ptr| {
diff --git a/crates/libmpv2/src/mpv/errors.rs b/crates/libmpv2/src/mpv/errors.rs
index a2baee5..a2d3dd8 100644
--- a/crates/libmpv2/src/mpv/errors.rs
+++ b/crates/libmpv2/src/mpv/errors.rs
@@ -8,36 +8,52 @@
 // You should have received a copy of the License along with this program.
 // If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
 
-use std::{ffi::NulError, os::raw as ctype, str::Utf8Error};
-
-use thiserror::Error;
+use std::{ffi::NulError, fmt::Display, os::raw as ctype, str::Utf8Error};
 
 use super::mpv_error;
 
 #[allow(missing_docs)]
 pub type Result<T> = ::std::result::Result<T, Error>;
 
-#[derive(Error, Debug)]
+#[derive(Debug)]
 pub enum Error {
-    #[error("loading file failed: {error}")]
-    Loadfile { error: String },
+    Loadfile {
+        error: String,
+    },
 
-    #[error("version mismatch detected! Linked version ({linked}) is unequal to the loaded version ({loaded})")]
     VersionMismatch {
         linked: ctype::c_ulong,
         loaded: ctype::c_ulong,
     },
 
-    #[error("invalid utf8 returned")]
     InvalidUtf8,
 
-    #[error("null pointer returned")]
     Null,
 
-    #[error("raw error returned: {}", to_string_mpv_error(*(.0)))]
     Raw(crate::MpvError),
 }
 
+impl std::error::Error for Error {}
+
+impl Display for Error {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        match self {
+            Error::Loadfile { error } => write!(f, "loading file failed: {error}"),
+            Error::VersionMismatch { linked, loaded } => write!(
+                f,
+                "version mismatch detected! Linked version ({linked}) is unequal to the loaded version ({loaded})"
+            ),
+            Error::InvalidUtf8 => f.write_str("invalid utf8 returned"),
+            Error::Null => f.write_str("null pointer returned"),
+            Error::Raw(raw) => write!(
+                f,
+                include_str!("./raw_error_warning.txt"),
+                to_string_mpv_error(*(raw))
+            ),
+        }
+    }
+}
+
 impl From<NulError> for Error {
     fn from(_other: NulError) -> Error {
         Error::Null
@@ -76,35 +92,70 @@ fn to_string_mpv_error_raw(num: crate::MpvError) -> (&'static str, &'static str)
 
         mpv_error::NoMem => ("Memory allocation failed.", ""),
 
-        mpv_error::Uninitialized => ("The mpv core wasn't configured and initialized yet", " See the notes in mpv_create()."),
+        mpv_error::Uninitialized => (
+            "The mpv core wasn't configured and initialized yet",
+            " See the notes in mpv_create().",
+        ),
 
-        mpv_error::InvalidParameter => ("Generic catch-all error if a parameter is set to an invalid or unsupported value.", "This is used if there is no better error code."),
+        mpv_error::InvalidParameter => (
+            "Generic catch-all error if a parameter is set to an invalid or unsupported value.",
+            "This is used if there is no better error code.",
+        ),
 
         mpv_error::OptionNotFound => ("Trying to set an option that doesn't exist.", ""),
-        mpv_error::OptionFormat => ("Trying to set an option using an unsupported MPV_FORMAT.", ""),
-        mpv_error::OptionError => ("Setting the option failed", " Typically this happens if the provided option value could not be parsed."),
+        mpv_error::OptionFormat => (
+            "Trying to set an option using an unsupported MPV_FORMAT.",
+            "",
+        ),
+        mpv_error::OptionError => (
+            "Setting the option failed",
+            " Typically this happens if the provided option value could not be parsed.",
+        ),
 
         mpv_error::PropertyNotFound => ("The accessed property doesn't exist.", ""),
-        mpv_error::PropertyFormat => ("Trying to set or get a property using an unsupported MPV_FORMAT.", ""),
-        mpv_error::PropertyUnavailable => ("The property exists, but is not available", "This usually happens when the associated subsystem is not active, e.g. querying audio parameters while audio is disabled."),
+        mpv_error::PropertyFormat => (
+            "Trying to set or get a property using an unsupported MPV_FORMAT.",
+            "",
+        ),
+        mpv_error::PropertyUnavailable => (
+            "The property exists, but is not available",
+            "This usually happens when the associated subsystem is not active, e.g. querying audio parameters while audio is disabled.",
+        ),
         mpv_error::PropertyError => ("Error setting or getting a property.", ""),
 
-        mpv_error::Command => ("General error when running a command with mpv_command and similar.", ""),
+        mpv_error::Command => (
+            "General error when running a command with mpv_command and similar.",
+            "",
+        ),
 
-        mpv_error::LoadingFailed => ("Generic error on loading (usually used with mpv_event_end_file.error).", ""),
+        mpv_error::LoadingFailed => (
+            "Generic error on loading (usually used with mpv_event_end_file.error).",
+            "",
+        ),
 
         mpv_error::AoInitFailed => ("Initializing the audio output failed.", ""),
         mpv_error::VoInitFailed => ("Initializing the video output failed.", ""),
 
-        mpv_error::NothingToPlay => ("There was no audio or video data to play", "This also happens if the file was recognized, but did not contain any audio or video streams, or no streams were selected."),
+        mpv_error::NothingToPlay => (
+            "There was no audio or video data to play",
+            "This also happens if the file was recognized, but did not contain any audio or video streams, or no streams were selected.",
+        ),
 
-        mpv_error::UnknownFormat => ("     * When trying to load the file, the file format could not be determined, or the file was too broken to open it.", ""),
+        mpv_error::UnknownFormat => (
+            "     * When trying to load the file, the file format could not be determined, or the file was too broken to open it.",
+            "",
+        ),
 
-        mpv_error::Generic => ("Generic error for signaling that certain system requirements are not fulfilled.", ""),
+        mpv_error::Generic => (
+            "Generic error for signaling that certain system requirements are not fulfilled.",
+            "",
+        ),
         mpv_error::NotImplemented => ("The API function which was called is a stub only", ""),
         mpv_error::Unsupported => ("Unspecified error.", ""),
 
-        mpv_error::Success => unreachable!("This is not an error. It's just here, to ensure that the 0 case marks an success'"),
+        mpv_error::Success => unreachable!(
+            "This is not an error. It's just here, to ensure that the 0 case marks an success'"
+        ),
         _ => unreachable!("Mpv seems to have changed it's constants."),
     }
 }
diff --git a/crates/libmpv2/src/mpv/events.rs b/crates/libmpv2/src/mpv/events.rs
index 6fb4683..f10ff6e 100644
--- a/crates/libmpv2/src/mpv/events.rs
+++ b/crates/libmpv2/src/mpv/events.rs
@@ -11,7 +11,7 @@
 use crate::mpv_node::sys_node::SysMpvNode;
 use crate::{mpv::mpv_err, *};
 
-use std::ffi::{c_void, CString};
+use std::ffi::{CString, c_void};
 use std::os::raw as ctype;
 use std::ptr::NonNull;
 use std::slice;
@@ -70,26 +70,28 @@ impl<'a> PropertyData<'a> {
     // SAFETY: meant to extract the data from an event property. See `mpv_event_property` in
     // `client.h`
     unsafe fn from_raw(format: MpvFormat, ptr: *mut ctype::c_void) -> Result<PropertyData<'a>> {
-        assert!(!ptr.is_null());
-        match format {
-            mpv_format::Flag => Ok(PropertyData::Flag(*(ptr as *mut bool))),
-            mpv_format::String => {
-                let char_ptr = *(ptr as *mut *mut ctype::c_char);
-                Ok(PropertyData::Str(mpv_cstr_to_str!(char_ptr)?))
-            }
-            mpv_format::OsdString => {
-                let char_ptr = *(ptr as *mut *mut ctype::c_char);
-                Ok(PropertyData::OsdStr(mpv_cstr_to_str!(char_ptr)?))
-            }
-            mpv_format::Double => Ok(PropertyData::Double(*(ptr as *mut f64))),
-            mpv_format::Int64 => Ok(PropertyData::Int64(*(ptr as *mut i64))),
-            mpv_format::Node => {
-                let sys_node = *(ptr as *mut libmpv2_sys::mpv_node);
-                let node = SysMpvNode::new(sys_node, false);
-                return Ok(PropertyData::Node(node.value().unwrap()));
+        unsafe {
+            assert!(!ptr.is_null());
+            match format {
+                mpv_format::Flag => Ok(PropertyData::Flag(*(ptr as *mut bool))),
+                mpv_format::String => {
+                    let char_ptr = *(ptr as *mut *mut ctype::c_char);
+                    Ok(PropertyData::Str(mpv_cstr_to_str!(char_ptr)?))
+                }
+                mpv_format::OsdString => {
+                    let char_ptr = *(ptr as *mut *mut ctype::c_char);
+                    Ok(PropertyData::OsdStr(mpv_cstr_to_str!(char_ptr)?))
+                }
+                mpv_format::Double => Ok(PropertyData::Double(*(ptr as *mut f64))),
+                mpv_format::Int64 => Ok(PropertyData::Int64(*(ptr as *mut i64))),
+                mpv_format::Node => {
+                    let sys_node = *(ptr as *mut libmpv2_sys::mpv_node);
+                    let node = SysMpvNode::new(sys_node, false);
+                    Ok(PropertyData::Node(node.value().unwrap()))
+                }
+                mpv_format::None => unreachable!(),
+                _ => unimplemented!(),
             }
-            mpv_format::None => unreachable!(),
-            _ => unimplemented!(),
         }
     }
 }
@@ -146,11 +148,13 @@ pub enum Event<'a> {
 }
 
 unsafe extern "C" fn wu_wrapper<F: Fn() + Send + 'static>(ctx: *mut c_void) {
-    if ctx.is_null() {
-        panic!("ctx for wakeup wrapper is NULL");
-    }
+    unsafe {
+        if ctx.is_null() {
+            panic!("ctx for wakeup wrapper is NULL");
+        }
 
-    (*(ctx as *mut F))();
+        (*(ctx as *mut F))();
+    }
 }
 
 /// Context to listen to events.
diff --git a/crates/libmpv2/src/mpv/protocol.rs b/crates/libmpv2/src/mpv/protocol.rs
index 31a5933..ee33411 100644
--- a/crates/libmpv2/src/mpv/protocol.rs
+++ b/crates/libmpv2/src/mpv/protocol.rs
@@ -17,7 +17,7 @@ use std::os::raw as ctype;
 use std::panic;
 use std::panic::RefUnwindSafe;
 use std::slice;
-use std::sync::{atomic::Ordering, Mutex};
+use std::sync::{Mutex, atomic::Ordering};
 
 impl Mpv {
     /// Create a context with which custom protocols can be registered.
@@ -63,26 +63,28 @@ where
     T: RefUnwindSafe,
     U: RefUnwindSafe,
 {
-    let data = user_data as *mut ProtocolData<T, U>;
+    unsafe {
+        let data = user_data as *mut ProtocolData<T, U>;
 
-    (*info).cookie = user_data;
-    (*info).read_fn = Some(read_wrapper::<T, U>);
-    (*info).seek_fn = Some(seek_wrapper::<T, U>);
-    (*info).size_fn = Some(size_wrapper::<T, U>);
-    (*info).close_fn = Some(close_wrapper::<T, U>);
+        (*info).cookie = user_data;
+        (*info).read_fn = Some(read_wrapper::<T, U>);
+        (*info).seek_fn = Some(seek_wrapper::<T, U>);
+        (*info).size_fn = Some(size_wrapper::<T, U>);
+        (*info).close_fn = Some(close_wrapper::<T, U>);
 
-    let ret = panic::catch_unwind(|| {
-        let uri = mpv_cstr_to_str!(uri as *const _).unwrap();
-        ptr::write(
-            (*data).cookie,
-            ((*data).open_fn)(&mut (*data).user_data, uri),
-        );
-    });
+        let ret = panic::catch_unwind(|| {
+            let uri = mpv_cstr_to_str!(uri as *const _).unwrap();
+            ptr::write(
+                (*data).cookie,
+                ((*data).open_fn)(&mut (*data).user_data, uri),
+            );
+        });
 
-    if ret.is_ok() {
-        0
-    } else {
-        mpv_error::Generic as _
+        if ret.is_ok() {
+            0
+        } else {
+            mpv_error::Generic as _
+        }
     }
 }
 
@@ -95,16 +97,14 @@ where
     T: RefUnwindSafe,
     U: RefUnwindSafe,
 {
-    let data = cookie as *mut ProtocolData<T, U>;
+    unsafe {
+        let data = cookie as *mut ProtocolData<T, U>;
 
-    let ret = panic::catch_unwind(|| {
-        let slice = slice::from_raw_parts_mut(buf, nbytes as _);
-        ((*data).read_fn)(&mut *(*data).cookie, slice)
-    });
-    if let Ok(ret) = ret {
-        ret
-    } else {
-        -1
+        let ret = panic::catch_unwind(|| {
+            let slice = slice::from_raw_parts_mut(buf, nbytes as _);
+            ((*data).read_fn)(&mut *(*data).cookie, slice)
+        });
+        ret.unwrap_or(-1)
     }
 }
 
@@ -113,18 +113,21 @@ where
     T: RefUnwindSafe,
     U: RefUnwindSafe,
 {
-    let data = cookie as *mut ProtocolData<T, U>;
+    unsafe {
+        let data = cookie as *mut ProtocolData<T, U>;
 
-    if (*data).seek_fn.is_none() {
-        return mpv_error::Unsupported as _;
-    }
+        if (*data).seek_fn.is_none() {
+            return mpv_error::Unsupported as _;
+        }
 
-    let ret =
-        panic::catch_unwind(|| (*(*data).seek_fn.as_ref().unwrap())(&mut *(*data).cookie, offset));
-    if let Ok(ret) = ret {
-        ret
-    } else {
-        mpv_error::Generic as _
+        let ret = panic::catch_unwind(|| {
+            (*(*data).seek_fn.as_ref().unwrap())(&mut *(*data).cookie, offset)
+        });
+        if let Ok(ret) = ret {
+            ret
+        } else {
+            mpv_error::Generic as _
+        }
     }
 }
 
@@ -133,17 +136,20 @@ where
     T: RefUnwindSafe,
     U: RefUnwindSafe,
 {
-    let data = cookie as *mut ProtocolData<T, U>;
+    unsafe {
+        let data = cookie as *mut ProtocolData<T, U>;
 
-    if (*data).size_fn.is_none() {
-        return mpv_error::Unsupported as _;
-    }
+        if (*data).size_fn.is_none() {
+            return mpv_error::Unsupported as _;
+        }
 
-    let ret = panic::catch_unwind(|| (*(*data).size_fn.as_ref().unwrap())(&mut *(*data).cookie));
-    if let Ok(ret) = ret {
-        ret
-    } else {
-        mpv_error::Unsupported as _
+        let ret =
+            panic::catch_unwind(|| (*(*data).size_fn.as_ref().unwrap())(&mut *(*data).cookie));
+        if let Ok(ret) = ret {
+            ret
+        } else {
+            mpv_error::Unsupported as _
+        }
     }
 }
 
@@ -153,9 +159,11 @@ where
     T: RefUnwindSafe,
     U: RefUnwindSafe,
 {
-    let data = Box::from_raw(cookie as *mut ProtocolData<T, U>);
+    unsafe {
+        let data = Box::from_raw(cookie as *mut ProtocolData<T, U>);
 
-    panic::catch_unwind(|| (data.close_fn)(Box::from_raw(data.cookie)));
+        panic::catch_unwind(|| (data.close_fn)(Box::from_raw(data.cookie)));
+    }
 }
 
 struct ProtocolData<T, U> {
@@ -177,8 +185,8 @@ pub struct ProtocolContext<'parent, T: RefUnwindSafe, U: RefUnwindSafe> {
     _does_not_outlive: PhantomData<&'parent Mpv>,
 }
 
-unsafe impl<'parent, T: RefUnwindSafe, U: RefUnwindSafe> Send for ProtocolContext<'parent, T, U> {}
-unsafe impl<'parent, T: RefUnwindSafe, U: RefUnwindSafe> Sync for ProtocolContext<'parent, T, U> {}
+unsafe impl<T: RefUnwindSafe, U: RefUnwindSafe> Send for ProtocolContext<'_, T, U> {}
+unsafe impl<T: RefUnwindSafe, U: RefUnwindSafe> Sync for ProtocolContext<'_, T, U> {}
 
 impl<'parent, T: RefUnwindSafe, U: RefUnwindSafe> ProtocolContext<'parent, T, U> {
     fn new(
@@ -228,20 +236,23 @@ impl<T: RefUnwindSafe, U: RefUnwindSafe> Protocol<T, U> {
         seek_fn: Option<StreamSeek<T>>,
         size_fn: Option<StreamSize<T>>,
     ) -> Protocol<T, U> {
-        let c_layout = Layout::from_size_align(mem::size_of::<T>(), mem::align_of::<T>()).unwrap();
-        let cookie = alloc::alloc(c_layout) as *mut T;
-        let data = Box::into_raw(Box::new(ProtocolData {
-            cookie,
-            user_data,
+        unsafe {
+            let c_layout =
+                Layout::from_size_align(mem::size_of::<T>(), mem::align_of::<T>()).unwrap();
+            let cookie = alloc::alloc(c_layout) as *mut T;
+            let data = Box::into_raw(Box::new(ProtocolData {
+                cookie,
+                user_data,
 
-            open_fn,
-            close_fn,
-            read_fn,
-            seek_fn,
-            size_fn,
-        }));
+                open_fn,
+                close_fn,
+                read_fn,
+                seek_fn,
+                size_fn,
+            }));
 
-        Protocol { name, data }
+            Protocol { name, data }
+        }
     }
 
     fn register(&self, ctx: *mut libmpv2_sys::mpv_handle) -> Result<()> {
diff --git a/crates/libmpv2/src/mpv/raw_error_warning.txt b/crates/libmpv2/src/mpv/raw_error_warning.txt
new file mode 100644
index 0000000..277500a
--- /dev/null
+++ b/crates/libmpv2/src/mpv/raw_error_warning.txt
@@ -0,0 +1,5 @@
+Raw mpv error: {}
+
+This error is directly returned from `mpv`.
+This is probably caused by a bug in `yt`. Please open an issue about
+this and try to reproduce it with the `-vvvv` verbosity setting.
diff --git a/crates/libmpv2/src/mpv/raw_error_warning.txt.license b/crates/libmpv2/src/mpv/raw_error_warning.txt.license
new file mode 100644
index 0000000..7813eb6
--- /dev/null
+++ b/crates/libmpv2/src/mpv/raw_error_warning.txt.license
@@ -0,0 +1,9 @@
+yt - A fully featured command line YouTube client
+
+Copyright (C) 2025 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+SPDX-License-Identifier: GPL-3.0-or-later
+
+This file is part of Yt.
+
+You should have received a copy of the License along with this program.
+If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
diff --git a/crates/libmpv2/src/mpv/render.rs b/crates/libmpv2/src/mpv/render.rs
index c3f2dc9..02f70bb 100644
--- a/crates/libmpv2/src/mpv/render.rs
+++ b/crates/libmpv2/src/mpv/render.rs
@@ -8,9 +8,9 @@
 // You should have received a copy of the License along with this program.
 // If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
 
-use crate::{mpv::mpv_err, Error, Result};
+use crate::{Error, Result, mpv::mpv_err};
 use std::collections::HashMap;
-use std::ffi::{c_void, CStr};
+use std::ffi::{CStr, c_void};
 use std::os::raw::c_int;
 use std::ptr;
 
@@ -125,26 +125,30 @@ impl<C> From<&RenderParam<C>> for u32 {
 }
 
 unsafe extern "C" fn gpa_wrapper<GLContext>(ctx: *mut c_void, name: *const i8) -> *mut c_void {
-    if ctx.is_null() {
-        panic!("ctx for get_proc_address wrapper is NULL");
-    }
+    unsafe {
+        if ctx.is_null() {
+            panic!("ctx for get_proc_address wrapper is NULL");
+        }
 
-    let params: *mut OpenGLInitParams<GLContext> = ctx as _;
-    let params = &*params;
-    (params.get_proc_address)(
-        &params.ctx,
-        CStr::from_ptr(name)
-            .to_str()
-            .expect("Could not convert function name to str"),
-    )
+        let params: *mut OpenGLInitParams<GLContext> = ctx as _;
+        let params = &*params;
+        (params.get_proc_address)(
+            &params.ctx,
+            CStr::from_ptr(name)
+                .to_str()
+                .expect("Could not convert function name to str"),
+        )
+    }
 }
 
 unsafe extern "C" fn ru_wrapper<F: Fn() + Send + 'static>(ctx: *mut c_void) {
-    if ctx.is_null() {
-        panic!("ctx for render_update wrapper is NULL");
-    }
+    unsafe {
+        if ctx.is_null() {
+            panic!("ctx for render_update wrapper is NULL");
+        }
 
-    (*(ctx as *mut F))();
+        (*(ctx as *mut F))();
+    }
 }
 
 impl<C> From<OpenGLInitParams<C>> for libmpv2_sys::mpv_opengl_init_params {
@@ -197,14 +201,18 @@ impl<C> From<RenderParam<C>> for libmpv2_sys::mpv_render_param {
 }
 
 unsafe fn free_void_data<T>(ptr: *mut c_void) {
-    drop(Box::<T>::from_raw(ptr as *mut T));
+    unsafe {
+        drop(Box::<T>::from_raw(ptr as *mut T));
+    }
 }
 
 unsafe fn free_init_params<C>(ptr: *mut c_void) {
-    let params = Box::from_raw(ptr as *mut libmpv2_sys::mpv_opengl_init_params);
-    drop(Box::from_raw(
-        params.get_proc_address_ctx as *mut OpenGLInitParams<C>,
-    ));
+    unsafe {
+        let params = Box::from_raw(ptr as *mut libmpv2_sys::mpv_opengl_init_params);
+        drop(Box::from_raw(
+            params.get_proc_address_ctx as *mut OpenGLInitParams<C>,
+        ));
+    }
 }
 
 impl RenderContext {
diff --git a/crates/termsize/.gitignore b/crates/termsize/.gitignore
new file mode 100644
index 0000000..5bc2870
--- /dev/null
+++ b/crates/termsize/.gitignore
@@ -0,0 +1,12 @@
+# yt - A fully featured command line YouTube client
+#
+# Copyright (C) 2025 softprops <d.tangren@gmail.com>
+# SPDX-License-Identifier: MIT
+#
+# This file is part of Yt.
+#
+# You should have received a copy of the License along with this program.
+# If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
+
+target
+Cargo.lock
diff --git a/crates/termsize/Cargo.toml b/crates/termsize/Cargo.toml
new file mode 100644
index 0000000..10ab7ed
--- /dev/null
+++ b/crates/termsize/Cargo.toml
@@ -0,0 +1,36 @@
+# yt - A fully featured command line YouTube client
+#
+# Copyright (C) 2025 softprops <d.tangren@gmail.com>
+# SPDX-License-Identifier: MIT
+#
+# This file is part of Yt.
+#
+# You should have received a copy of the License along with this program.
+# If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
+
+[package]
+name = "termsize"
+authors = [
+  "softprops <d.tangren@gmail.com>",
+  "Benedikt Peetz <benedikt.peetz@b-peetz.de>",
+]
+description = "Retrieves terminal size"
+repository = "https://github.com/softprops/termsize"
+homepage = "https://github.com/softprops/termsize"
+documentation = "http://softprops.github.io/termsize"
+keywords = ["tty", "terminal", "term", "size", "dimensions"]
+license = "MIT"
+readme = "README.md"
+version.workspace = true
+edition.workspace = true
+rust-version.workspace = true
+publish = false
+
+[target.'cfg(unix)'.dependencies]
+libc = "0.2"
+
+[target.'cfg(windows)'.dependencies]
+winapi = { version = "0.3", features = ["handleapi", "fileapi", "wincon"] }
+
+[lints]
+workspace = true
diff --git a/crates/termsize/LICENSE b/crates/termsize/LICENSE
new file mode 100644
index 0000000..78c7d8a
--- /dev/null
+++ b/crates/termsize/LICENSE
@@ -0,0 +1,20 @@
+Copyright (c) 2015-2024 Doug Tangren
+
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of this software and associated documentation files (the
+"Software"), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
\ No newline at end of file
diff --git a/crates/termsize/LICENSE.license b/crates/termsize/LICENSE.license
new file mode 100644
index 0000000..3562ab9
--- /dev/null
+++ b/crates/termsize/LICENSE.license
@@ -0,0 +1,9 @@
+yt - A fully featured command line YouTube client
+
+Copyright (C) 2025 softprops <d.tangren@gmail.com>
+SPDX-License-Identifier: MIT
+
+This file is part of Yt.
+
+You should have received a copy of the License along with this program.
+If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
diff --git a/crates/termsize/README.md b/crates/termsize/README.md
new file mode 100644
index 0000000..305669b
--- /dev/null
+++ b/crates/termsize/README.md
@@ -0,0 +1,51 @@
+<!--
+yt - A fully featured command line YouTube client
+
+Copyright (C) 2025 softprops <d.tangren@gmail.com>
+SPDX-License-Identifier: MIT
+
+This file is part of Yt.
+
+You should have received a copy of the License along with this program.
+If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
+-->
+
+# termsize
+
+[![CI](https://github.com/softprops/termsize/actions/workflows/ci.yml/badge.svg)](https://github.com/softprops/termsize/actions/workflows/ci.yml)
+[![Crates.io](https://img.shields.io/crates/v/termsize.svg)](https://crates.io/crates/termsize)
+
+> because terminal size matters
+
+Termsize is a Rust crate providing a multi-platform interface for resolving your
+terminal's current size in rows and columns. On most unix systems, this is
+similar to invoking the [stty(1)](http://man7.org/linux/man-pages/man1/stty.1.html)
+program and requesting the terminal size.
+
+## [Documentation](https://softprops.github.com/termsize)
+
+## install
+
+run `cargo add termsize` in your terminal or add the following to your
+`Cargo.toml` file
+
+```toml
+[dependencies]
+termsize = "0.1"
+```
+
+## usage
+
+Termsize provides one function, `get`, which returns a `termsize::Size` struct
+exposing two fields: `rows` and `cols`, representing the number of rows and
+columns a terminal's stdout supports.
+
+```rust
+pub fn main() {
+  termsize::get().map(|size| {
+    println!("rows {} cols {}", size.rows, size.cols)
+  });
+}
+```
+
+Doug Tangren (softprops) 2015-2024
diff --git a/crates/termsize/src/lib.rs b/crates/termsize/src/lib.rs
new file mode 100644
index 0000000..69e7b78
--- /dev/null
+++ b/crates/termsize/src/lib.rs
@@ -0,0 +1,52 @@
+#![deny(missing_docs)]
+// yt - A fully featured command line YouTube client
+//
+// Copyright (C) 2025 softprops <d.tangren@gmail.com>
+// SPDX-License-Identifier: MIT
+//
+// This file is part of Yt.
+//
+// You should have received a copy of the License along with this program.
+// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
+
+//! Termsize is a tiny crate that provides a simple
+//! interface for retrieving the current
+//! [terminal interface](http://www.manpagez.com/man/4/tty/) size
+//!
+//! ```rust
+//! extern crate termsize;
+//!
+//! termsize::get().map(|size| println!("rows {} cols {}", size.rows, size.cols));
+//! ```
+
+/// Container for number of rows and columns
+#[derive(Debug, Clone, Copy)]
+pub struct Size {
+    /// number of rows
+    pub rows: u16,
+    /// number of columns
+    pub cols: u16,
+}
+
+#[cfg(unix)]
+#[path = "nix.rs"]
+mod imp;
+
+#[cfg(windows)]
+#[path = "win.rs"]
+mod imp;
+
+#[cfg(not(any(unix, windows)))]
+#[path = "other.rs"]
+mod imp;
+
+pub use imp::get;
+
+#[cfg(test)]
+mod tests {
+    use super::get;
+    #[test]
+    fn test_get() {
+        assert!(get().is_some());
+    }
+}
diff --git a/crates/termsize/src/nix.rs b/crates/termsize/src/nix.rs
new file mode 100644
index 0000000..d672f54
--- /dev/null
+++ b/crates/termsize/src/nix.rs
@@ -0,0 +1,100 @@
+// yt - A fully featured command line YouTube client
+//
+// Copyright (C) 2025 softprops <d.tangren@gmail.com>
+// SPDX-License-Identifier: MIT
+//
+// This file is part of Yt.
+//
+// You should have received a copy of the License along with this program.
+// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
+
+use std::io::IsTerminal;
+
+use self::super::Size;
+use libc::{STDOUT_FILENO, TIOCGWINSZ, c_ushort, ioctl};
+
+/// A representation of the size of the current terminal
+#[repr(C)]
+#[derive(Debug)]
+struct UnixSize {
+    /// number of rows
+    pub rows: c_ushort,
+    /// number of columns
+    pub cols: c_ushort,
+    x: c_ushort,
+    y: c_ushort,
+}
+
+/// Gets the current terminal size
+#[must_use]
+pub fn get() -> Option<Size> {
+    // http://rosettacode.org/wiki/Terminal_control/Dimensions#Library:_BSD_libc
+    if !std::io::stdout().is_terminal() {
+        return None;
+    }
+    let mut us = UnixSize {
+        rows: 0,
+        cols: 0,
+        x: 0,
+        y: 0,
+    };
+    let r = unsafe { ioctl(STDOUT_FILENO, TIOCGWINSZ, &mut us) };
+    if r == 0 {
+        Some(Size {
+            rows: us.rows,
+            cols: us.cols,
+        })
+    } else {
+        None
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::{super::Size, get};
+    use std::process::{Command, Output, Stdio};
+
+    #[cfg(target_os = "macos")]
+    fn stty_size() -> Output {
+        Command::new("stty")
+            .arg("-f")
+            .arg("/dev/stderr")
+            .arg("size")
+            .stderr(Stdio::inherit())
+            .output()
+            .expect("expected stty output")
+    }
+
+    #[cfg(not(target_os = "macos"))]
+    fn stty_size() -> Output {
+        Command::new("stty")
+            .arg("-F")
+            .arg("/dev/stderr")
+            .arg("size")
+            .stderr(Stdio::inherit())
+            .output()
+            .expect("expected stty output")
+    }
+
+    #[test]
+    fn test_shell() {
+        let output = stty_size();
+        assert!(output.status.success());
+        let stdout = String::from_utf8(output.stdout).expect("expected utf8");
+        let mut data = stdout.split_whitespace();
+        let rs = data
+            .next()
+            .expect("expected row")
+            .parse::<u16>()
+            .expect("expected u16 col");
+        let cs = data
+            .next()
+            .expect("expected col")
+            .parse::<u16>()
+            .expect("expected u16 col");
+        if let Some(Size { rows, cols }) = get() {
+            assert_eq!(rows, rs);
+            assert_eq!(cols, cs);
+        }
+    }
+}
diff --git a/crates/termsize/src/other.rs b/crates/termsize/src/other.rs
new file mode 100644
index 0000000..8a02f22
--- /dev/null
+++ b/crates/termsize/src/other.rs
@@ -0,0 +1,14 @@
+// yt - A fully featured command line YouTube client
+//
+// Copyright (C) 2025 softprops <d.tangren@gmail.com>
+// SPDX-License-Identifier: MIT
+//
+// This file is part of Yt.
+//
+// You should have received a copy of the License along with this program.
+// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
+
+/// Gets the current terminal size
+pub fn get() -> Option<super::Size> {
+    None
+}
diff --git a/crates/termsize/src/win.rs b/crates/termsize/src/win.rs
new file mode 100644
index 0000000..72d8433
--- /dev/null
+++ b/crates/termsize/src/win.rs
@@ -0,0 +1,52 @@
+// yt - A fully featured command line YouTube client
+//
+// Copyright (C) 2025 softprops <d.tangren@gmail.com>
+// SPDX-License-Identifier: MIT
+//
+// This file is part of Yt.
+//
+// You should have received a copy of the License along with this program.
+// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
+
+use std::ptr;
+
+use winapi::um::{
+    fileapi::{CreateFileA, OPEN_EXISTING},
+    handleapi::INVALID_HANDLE_VALUE,
+    wincon::{CONSOLE_SCREEN_BUFFER_INFO, GetConsoleScreenBufferInfo},
+    winnt::{FILE_SHARE_WRITE, GENERIC_READ, GENERIC_WRITE},
+};
+
+use self::super::Size;
+
+/// Gets the current terminal size
+pub fn get() -> Option<Size> {
+    // http://rosettacode.org/wiki/Terminal_control/Dimensions#Windows
+    let handle = unsafe {
+        CreateFileA(
+            b"CONOUT$\0".as_ptr() as *const i8,
+            GENERIC_READ | GENERIC_WRITE,
+            FILE_SHARE_WRITE,
+            ptr::null_mut(),
+            OPEN_EXISTING,
+            0,
+            ptr::null_mut(),
+        )
+    };
+    if handle == INVALID_HANDLE_VALUE {
+        return None;
+    }
+    let info = unsafe {
+        // https://msdn.microsoft.com/en-us/library/windows/desktop/ms683171(v=vs.85).aspx
+        let mut info = ::std::mem::MaybeUninit::<CONSOLE_SCREEN_BUFFER_INFO>::uninit();
+        if GetConsoleScreenBufferInfo(handle, info.as_mut_ptr()) == 0 {
+            None
+        } else {
+            Some(info.assume_init())
+        }
+    };
+    info.map(|inf| Size {
+        rows: (inf.srWindow.Bottom - inf.srWindow.Top + 1) as u16,
+        cols: (inf.srWindow.Right - inf.srWindow.Left + 1) as u16,
+    })
+}
diff --git a/crates/yt/Cargo.toml b/crates/yt/Cargo.toml
new file mode 100644
index 0000000..c3ed3b0
--- /dev/null
+++ b/crates/yt/Cargo.toml
@@ -0,0 +1,66 @@
+# yt - A fully featured command line YouTube client
+#
+# Copyright (C) 2024 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+# Copyright (C) 2025 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# This file is part of Yt.
+#
+# You should have received a copy of the License along with this program.
+# If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
+
+[package]
+name = "yt"
+description = "A fully featured command line YouTube client"
+keywords = []
+categories = []
+default-run = "yt"
+version.workspace = true
+edition.workspace = true
+authors.workspace = true
+license.workspace = true
+repository.workspace = true
+rust-version.workspace = true
+publish = false
+
+[dependencies]
+anyhow = "1.0.98"
+blake3 = "1.8.2"
+chrono = { version = "0.4.41", features = ["now"] }
+chrono-humanize = "0.2.3"
+clap = { version = "4.5.40", features = ["derive"] }
+clap_complete = { version = "4.5.54", features = ["unstable-dynamic"] }
+futures = "0.3.31"
+owo-colors = "4.2.2"
+regex = "1.11.1"
+sqlx = { version = "0.8.6", features = ["runtime-tokio", "sqlite"] }
+stderrlog = "0.6.0"
+tempfile = "3.20.0"
+toml = "0.8.23"
+xdg = "3.0.0"
+shlex = "1.3.0"
+bytes.workspace = true
+libmpv2.workspace = true
+log.workspace = true
+serde.workspace = true
+serde_json.workspace = true
+tokio.workspace = true
+url.workspace = true
+yt_dlp.workspace = true
+termsize.workspace = true
+uu_fmt.workspace = true
+notify = { version = "8.0.0", default-features = false }
+tokio-util = { version = "0.7.15", features = ["rt"] }
+
+[[bin]]
+name = "yt"
+doc = false
+path = "src/main.rs"
+
+[dev-dependencies]
+
+[lints]
+workspace = true
+
+[package.metadata.docs.rs]
+all-features = true
diff --git a/crates/yt/src/ansi_escape_codes.rs b/crates/yt/src/ansi_escape_codes.rs
new file mode 100644
index 0000000..462a126
--- /dev/null
+++ b/crates/yt/src/ansi_escape_codes.rs
@@ -0,0 +1,36 @@
+// yt - A fully featured command line YouTube client
+//
+// Copyright (C) 2025 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+// SPDX-License-Identifier: GPL-3.0-or-later
+//
+// This file is part of Yt.
+//
+// You should have received a copy of the License along with this program.
+// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
+
+// see: https://en.wikipedia.org/wiki/ANSI_escape_code#Control_Sequence_Introducer_commands
+const CSI: &str = "\x1b[";
+pub fn erase_in_display_from_cursor() {
+    print!("{CSI}0J");
+}
+pub fn cursor_up(number: usize) {
+    // HACK(@bpeetz): The default is `1` and running this command with a
+    // number of `0` results in it using the default (i.e., `1`) <2025-03-25>
+    if number != 0 {
+        print!("{CSI}{number}A");
+    }
+}
+
+pub fn clear_whole_line() {
+    eprint!("{CSI}2K");
+}
+pub fn move_to_col(x: usize) {
+    eprint!("{CSI}{x}G");
+}
+
+pub fn hide_cursor() {
+    eprint!("{CSI}?25l");
+}
+pub fn show_cursor() {
+    eprint!("{CSI}?25h");
+}
diff --git a/crates/yt/src/app.rs b/crates/yt/src/app.rs
new file mode 100644
index 0000000..15a9388
--- /dev/null
+++ b/crates/yt/src/app.rs
@@ -0,0 +1,50 @@
+// yt - A fully featured command line YouTube client
+//
+// Copyright (C) 2024 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+// Copyright (C) 2025 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+// SPDX-License-Identifier: GPL-3.0-or-later
+//
+// This file is part of Yt.
+//
+// You should have received a copy of the License along with this program.
+// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
+
+use anyhow::{Context, Result};
+use log::warn;
+use sqlx::{SqlitePool, sqlite::SqliteConnectOptions};
+
+use crate::{config::Config, storage::migrate::migrate_db};
+
+#[derive(Debug)]
+pub struct App {
+    pub database: SqlitePool,
+    pub config: Config,
+}
+
+impl App {
+    pub async fn new(config: Config, should_migrate_db: bool) -> Result<Self> {
+        let options = SqliteConnectOptions::new()
+            .filename(&config.paths.database_path)
+            .optimize_on_close(true, None)
+            .create_if_missing(true);
+
+        let pool = SqlitePool::connect_with(options)
+            .await
+            .context("Failed to connect to database!")?;
+
+        let app = App {
+            database: pool,
+            config,
+        };
+
+        if should_migrate_db {
+            migrate_db(&app)
+                .await
+                .context("Failed to migrate db to new version")?;
+        } else {
+            warn!("Skipping database migration.");
+        }
+
+        Ok(app)
+    }
+}
diff --git a/crates/yt/src/cache/mod.rs b/crates/yt/src/cache/mod.rs
new file mode 100644
index 0000000..83d5ee0
--- /dev/null
+++ b/crates/yt/src/cache/mod.rs
@@ -0,0 +1,105 @@
+// yt - A fully featured command line YouTube client
+//
+// Copyright (C) 2024 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+// Copyright (C) 2025 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+// SPDX-License-Identifier: GPL-3.0-or-later
+//
+// This file is part of Yt.
+//
+// You should have received a copy of the License along with this program.
+// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
+
+use anyhow::{Context, Result};
+use log::{debug, info};
+use tokio::fs;
+
+use crate::{
+    app::App,
+    storage::video_database::{
+        Video, VideoStatus, VideoStatusMarker, downloader::set_video_cache_path, get,
+    },
+};
+
+async fn invalidate_video(app: &App, video: &Video, hard: bool) -> Result<()> {
+    info!("Invalidating cache of video: '{}'", video.title);
+
+    if hard {
+        if let VideoStatus::Cached {
+            cache_path: path, ..
+        } = &video.status
+        {
+            info!("Removing cached video at: '{}'", path.display());
+            if let Err(err) = fs::remove_file(path).await.map_err(|err| err.kind()) {
+                match err {
+                    std::io::ErrorKind::NotFound => {
+                        // The path is already gone
+                        debug!(
+                            "Not actually removing path: '{}'. It is already gone.",
+                            path.display()
+                        );
+                    }
+                    err => Err(std::io::Error::from(err)).with_context(|| {
+                        format!(
+                            "Failed to delete video ('{}') cache path: '{}'.",
+                            video.title,
+                            path.display()
+                        )
+                    })?,
+                }
+            }
+        }
+    }
+
+    set_video_cache_path(app, &video.extractor_hash, None).await?;
+
+    Ok(())
+}
+
+pub async fn invalidate(app: &App, hard: bool) -> Result<()> {
+    let all_cached_things = get::videos(app, &[VideoStatusMarker::Cached]).await?;
+
+    info!("Got videos to invalidate: '{}'", all_cached_things.len());
+
+    for video in all_cached_things {
+        invalidate_video(app, &video, hard).await?;
+    }
+
+    Ok(())
+}
+
+/// # Panics
+/// Only if internal assertions fail.
+pub async fn maintain(app: &App, all: bool) -> Result<()> {
+    let domain = if all {
+        VideoStatusMarker::ALL.as_slice()
+    } else {
+        &[VideoStatusMarker::Watch, VideoStatusMarker::Cached]
+    };
+
+    let cached_videos = get::videos(app, domain).await?;
+
+    let mut found_focused = 0;
+    for vid in cached_videos {
+        if let VideoStatus::Cached {
+            cache_path: path,
+            is_focused,
+        } = &vid.status
+        {
+            info!("Checking if path ('{}') exists", path.display());
+            if !path.exists() {
+                invalidate_video(app, &vid, false).await?;
+            }
+
+            if *is_focused {
+                found_focused += 1;
+            }
+        }
+    }
+
+    assert!(
+        found_focused <= 1,
+        "Only one video can be focused at a time"
+    );
+
+    Ok(())
+}
diff --git a/crates/yt/src/cli.rs b/crates/yt/src/cli.rs
new file mode 100644
index 0000000..98bbb2d
--- /dev/null
+++ b/crates/yt/src/cli.rs
@@ -0,0 +1,504 @@
+// yt - A fully featured command line YouTube client
+//
+// Copyright (C) 2024 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+// Copyright (C) 2025 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+// SPDX-License-Identifier: GPL-3.0-or-later
+//
+// This file is part of Yt.
+//
+// You should have received a copy of the License along with this program.
+// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
+
+use std::{
+    ffi::OsStr,
+    fmt::{self, Display, Formatter},
+    path::PathBuf,
+    str::FromStr,
+    thread,
+};
+
+use anyhow::Context;
+use bytes::Bytes;
+use chrono::NaiveDate;
+use clap::{ArgAction, Args, Parser, Subcommand, ValueEnum};
+use clap_complete::{ArgValueCompleter, CompletionCandidate};
+use tokio::runtime::Runtime;
+use url::Url;
+
+use crate::{
+    app::App,
+    config::Config,
+    select::selection_file::duration::MaybeDuration,
+    storage::{subscriptions, video_database::extractor_hash::LazyExtractorHash},
+};
+
+#[derive(Parser, Debug)]
+#[clap(author, about, long_about = None)]
+#[allow(clippy::module_name_repetitions)]
+/// A command line interface to select, download and watch videos
+pub struct CliArgs {
+    #[command(subcommand)]
+    /// The subcommand to execute [default: select]
+    pub command: Option<Command>,
+
+    /// Show the version and exit
+    #[arg(long, short = 'V', action= ArgAction::SetTrue)]
+    pub version: bool,
+
+    /// Do not perform database migration before starting.
+    /// Setting this could cause runtime database access errors.
+    #[arg(long, short, action=ArgAction::SetTrue, default_value_t = false)]
+    pub no_migrate_db: bool,
+
+    /// Display colors [defaults to true if the config file has no value]
+    #[arg(long, short = 'C')]
+    pub color: Option<bool>,
+
+    /// Set the path to the videos.db. This overrides the default and the config file.
+    #[arg(long, short)]
+    pub db_path: Option<PathBuf>,
+
+    /// Set the path to the config.toml.
+    /// This overrides the default.
+    #[arg(long, short)]
+    pub config_path: Option<PathBuf>,
+
+    /// Increase message verbosity
+    #[arg(long="verbose", short = 'v', action = ArgAction::Count)]
+    pub verbosity: u8,
+
+    /// Silence all output
+    #[arg(long, short = 'q')]
+    pub quiet: bool,
+}
+
+#[derive(Subcommand, Debug)]
+pub enum Command {
+    /// Download and cache URLs
+    Download {
+        /// Forcefully re-download all cached videos (i.e. delete the cache path, then download).
+        #[arg(short, long)]
+        force: bool,
+
+        /// The maximum size the download dir should have. Beware that the value must be given in
+        /// bytes.
+        #[arg(short, long, value_parser = byte_parser)]
+        max_cache_size: Option<u64>,
+    },
+
+    /// Select, download and watch in one command.
+    Sedowa {},
+    /// Download and watch in one command.
+    Dowa {},
+
+    /// Work with single videos
+    Videos {
+        #[command(subcommand)]
+        cmd: VideosCommand,
+    },
+
+    /// Watch the already cached (and selected) videos
+    Watch {},
+
+    /// Visualize the current playlist
+    Playlist {
+        /// Linger and display changes
+        #[arg(short, long)]
+        watch: bool,
+    },
+
+    /// Show which videos have been selected to be watched (and their cache status)
+    Status {},
+
+    /// Show the configuration options in effect
+    Config {},
+
+    /// Display the comments of the currently playing video
+    Comments {},
+    /// Display the description of the currently playing video
+    Description {},
+
+    /// Manipulate the video cache in the database
+    #[command(visible_alias = "db")]
+    Database {
+        #[command(subcommand)]
+        command: CacheCommand,
+    },
+
+    /// Change the state of videos in the database (the default)
+    Select {
+        #[command(subcommand)]
+        cmd: Option<SelectCommand>,
+    },
+
+    /// Update the video database
+    Update {
+        /// The maximum number of videos to fetch for each subscription.
+        #[arg(short, long)]
+        max_backlog: Option<usize>,
+
+        /// How many subs were already checked.
+        ///
+        /// Only used in the progress display in combination with `--grouped`.
+        #[arg(short, long, hide = true)]
+        current_progress: Option<usize>,
+
+        /// How many subs are to be checked.
+        ///
+        /// Only used in the progress display in combination with `--grouped`.
+        #[arg(short, long, hide = true)]
+        total_number: Option<usize>,
+
+        /// The subscriptions to update
+        #[arg(add = ArgValueCompleter::new(complete_subscription))]
+        subscriptions: Vec<String>,
+
+        /// Perform the updates in blocks.
+        ///
+        /// This works around the memory leaks in the default update invocation.
+        #[arg(
+            short,
+            long,
+            conflicts_with = "total_number",
+            conflicts_with = "current_progress"
+        )]
+        grouped: bool,
+    },
+
+    /// Manipulate subscriptions
+    #[command(visible_alias = "subs")]
+    Subscriptions {
+        #[command(subcommand)]
+        cmd: SubscriptionCommand,
+    },
+}
+
+fn byte_parser(input: &str) -> Result<u64, anyhow::Error> {
+    Ok(input
+        .parse::<Bytes>()
+        .with_context(|| format!("Failed to parse '{input}' as bytes!"))?
+        .as_u64())
+}
+
+impl Default for Command {
+    fn default() -> Self {
+        Self::Select {
+            cmd: Some(SelectCommand::default()),
+        }
+    }
+}
+
+#[derive(Subcommand, Clone, Debug)]
+pub enum VideosCommand {
+    /// List the videos in the database
+    #[command(visible_alias = "ls")]
+    List {
+        /// An optional search query to limit the results
+        #[arg(action = ArgAction::Append)]
+        search_query: Option<String>,
+
+        /// The number of videos to show
+        #[arg(short, long)]
+        limit: Option<usize>,
+    },
+
+    /// Get detailed information about a video
+    Info {
+        /// The short hash of the video
+        hash: LazyExtractorHash,
+    },
+}
+
+#[derive(Subcommand, Clone, Debug)]
+pub enum SubscriptionCommand {
+    /// Subscribe to a URL
+    Add {
+        #[arg(short, long)]
+        /// The human readable name of the subscription
+        name: Option<String>,
+
+        /// The URL to listen to
+        url: Url,
+    },
+
+    /// Unsubscribe from a URL
+    Remove {
+        /// The human readable name of the subscription
+        #[arg(add = ArgValueCompleter::new(complete_subscription))]
+        name: String,
+    },
+
+    /// Import a bunch of URLs as subscriptions.
+    Import {
+        /// The file containing the URLs. Reads from stdin otherwise.
+        file: Option<PathBuf>,
+
+        /// Remove any previous subscriptions
+        #[arg(short, long)]
+        force: bool,
+    },
+    /// Write all subscriptions in a format understood by `import`
+    Export {},
+
+    /// List all subscriptions
+    List {},
+}
+
+#[derive(Clone, Debug, Args)]
+#[command(infer_subcommands = true)]
+/// Mark the video given by the hash to be watched
+pub struct SharedSelectionCommandArgs {
+    /// The ordering priority (higher means more at the top)
+    #[arg(short, long)]
+    pub priority: Option<i64>,
+
+    /// The subtitles to download (e.g. 'en,de,sv')
+    #[arg(short = 'l', long)]
+    pub subtitle_langs: Option<String>,
+
+    /// The speed to set mpv to
+    #[arg(short, long)]
+    pub speed: Option<f64>,
+
+    /// The short extractor hash
+    pub hash: LazyExtractorHash,
+
+    pub title: Option<String>,
+
+    pub date: Option<OptionalNaiveDate>,
+
+    pub publisher: Option<OptionalPublisher>,
+
+    pub duration: Option<MaybeDuration>,
+
+    pub url: Option<Url>,
+}
+#[derive(Clone, Debug, Copy)]
+pub struct OptionalNaiveDate {
+    pub date: Option<NaiveDate>,
+}
+impl FromStr for OptionalNaiveDate {
+    type Err = anyhow::Error;
+    fn from_str(v: &str) -> Result<Self, Self::Err> {
+        if v == "[No release date]" {
+            Ok(Self { date: None })
+        } else {
+            Ok(Self {
+                date: Some(NaiveDate::from_str(v)?),
+            })
+        }
+    }
+}
+#[derive(Clone, Debug)]
+pub struct OptionalPublisher {
+    pub publisher: Option<String>,
+}
+impl FromStr for OptionalPublisher {
+    type Err = anyhow::Error;
+    fn from_str(v: &str) -> Result<Self, Self::Err> {
+        if v == "[No author]" {
+            Ok(Self { publisher: None })
+        } else {
+            Ok(Self {
+                publisher: Some(v.to_owned()),
+            })
+        }
+    }
+}
+
+#[derive(Default, ValueEnum, Clone, Copy, Debug)]
+pub enum SelectSplitSortKey {
+    /// Sort by the name of the publisher.
+    #[default]
+    Publisher,
+
+    /// Sort by the number of unselected videos per publisher.
+    Videos,
+}
+impl Display for SelectSplitSortKey {
+    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
+        match self {
+            SelectSplitSortKey::Publisher => f.write_str("publisher"),
+            SelectSplitSortKey::Videos => f.write_str("videos"),
+        }
+    }
+}
+
+#[derive(Default, ValueEnum, Clone, Copy, Debug)]
+pub enum SelectSplitSortMode {
+    /// Sort in ascending order (small -> big)
+    #[default]
+    Asc,
+
+    /// Sort in descending order (big -> small)
+    Desc,
+}
+
+impl Display for SelectSplitSortMode {
+    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
+        match self {
+            SelectSplitSortMode::Asc => f.write_str("asc"),
+            SelectSplitSortMode::Desc => f.write_str("desc"),
+        }
+    }
+}
+
+#[derive(Subcommand, Clone, Debug)]
+// NOTE: Keep this in sync with the [`constants::HELP_STR`] constant. <2024-08-20>
+// NOTE: Also keep this in sync with the `tree-sitter-yts/grammar.js`. <2024-11-04>
+pub enum SelectCommand {
+    /// Open a `git rebase`-like file to select the videos to watch (the default)
+    File {
+        /// Include done (watched, dropped) videos
+        #[arg(long, short)]
+        done: bool,
+
+        /// Use the last selection file (useful if you've spent time on it and want to get it again)
+        #[arg(long, short, conflicts_with = "done")]
+        use_last_selection: bool,
+    },
+
+    /// Generate a directory where each file contains only one subscription.
+    Split {
+        /// Include done (watched, dropped) videos
+        #[arg(long, short)]
+        done: bool,
+
+        /// Which key to use for sorting.
+        #[arg(default_value_t)]
+        sort_key: SelectSplitSortKey,
+
+        /// Which mode to use for sorting.
+        #[arg(default_value_t)]
+        sort_mode: SelectSplitSortMode,
+    },
+
+    /// Add a video to the database
+    ///
+    /// This optionally supports adding a playlist.
+    /// When a playlist is added, the `start` and `stop` arguments can be used to select which
+    /// playlist entries to include.
+    #[command(visible_alias = "a")]
+    Add {
+        urls: Vec<Url>,
+
+        /// Start adding playlist entries at this playlist index (zero based and inclusive)
+        #[arg(short = 's', long)]
+        start: Option<usize>,
+
+        /// Stop adding playlist entries at this playlist index (zero based and inclusive)
+        #[arg(short = 'e', long)]
+        stop: Option<usize>,
+    },
+
+    /// Mark the video given by the hash to be watched
+    #[command(visible_alias = "w")]
+    Watch {
+        #[command(flatten)]
+        shared: SharedSelectionCommandArgs,
+    },
+
+    /// Mark the video given by the hash to be dropped
+    #[command(visible_alias = "d")]
+    Drop {
+        #[command(flatten)]
+        shared: SharedSelectionCommandArgs,
+    },
+
+    /// Mark the video given by the hash as already watched
+    #[command(visible_alias = "wd")]
+    Watched {
+        #[command(flatten)]
+        shared: SharedSelectionCommandArgs,
+    },
+
+    /// Open the video URL in Firefox's `timesinks.youtube` profile
+    #[command(visible_alias = "u")]
+    Url {
+        #[command(flatten)]
+        shared: SharedSelectionCommandArgs,
+    },
+
+    /// Reset the video's status to 'Pick'
+    #[command(visible_alias = "p")]
+    Pick {
+        #[command(flatten)]
+        shared: SharedSelectionCommandArgs,
+    },
+}
+impl Default for SelectCommand {
+    fn default() -> Self {
+        Self::File {
+            done: false,
+            use_last_selection: false,
+        }
+    }
+}
+
+#[derive(Subcommand, Clone, Copy, Debug)]
+pub enum CacheCommand {
+    /// Invalidate all cache entries
+    Invalidate {
+        /// Also delete the cache path
+        #[arg(short = 'f', long)]
+        hard: bool,
+    },
+
+    /// Perform basic maintenance operations on the database.
+    /// This helps recover from invalid db states after a crash (or a forced exit via <CTRL-C>).
+    ///
+    /// 1. Check every path for validity (removing all invalid cache entries)
+    #[command(verbatim_doc_comment)]
+    Maintain {
+        /// Check every video (otherwise only the videos to be watched are checked)
+        #[arg(short, long)]
+        all: bool,
+    },
+}
+
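+// Shell-completion helper for subscription names.
+//
+// Clap invokes this synchronously, so the async database lookup runs on a separate thread with
+// its own Tokio runtime; every failure path simply yields no completion candidates.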
+fn complete_subscription(current: &OsStr) -> Vec<CompletionCandidate> {
+    let mut output = vec![];
+
+    let Some(current_prog) = current.to_str().map(ToOwned::to_owned) else {
+        return output;
+    };
+
+    let Ok(config) = Config::from_config_file(None, None, None) else {
+        return output;
+    };
+
+    let handle = thread::spawn(move || {
+        let Ok(rt) = Runtime::new() else {
+            return output;
+        };
+
+        let Ok(app) = rt.block_on(App::new(config, false)) else {
+            return output;
+        };
+
+        let Ok(all) = rt.block_on(subscriptions::get(&app)) else {
+            return output;
+        };
+
+        for sub in all.0.into_keys() {
+            if sub.starts_with(&current_prog) {
+                output.push(CompletionCandidate::new(sub));
+            }
+        }
+
+        output
+    });
+
+    handle.join().unwrap_or_default()
+}
+
+#[cfg(test)]
+mod test {
+    use clap::CommandFactory;
+
+    use super::CliArgs;
+    #[test]
+    fn verify_cli() {
+        CliArgs::command().debug_assert();
+    }
+}
diff --git a/crates/yt/src/comments/comment.rs b/crates/yt/src/comments/comment.rs
new file mode 100644
index 0000000..5bc939c
--- /dev/null
+++ b/crates/yt/src/comments/comment.rs
@@ -0,0 +1,152 @@
+// yt - A fully featured command line YouTube client
+//
+// Copyright (C) 2024 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+// Copyright (C) 2025 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+// SPDX-License-Identifier: GPL-3.0-or-later
+//
+// This file is part of Yt.
+//
+// You should have received a copy of the License along with this program.
+// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
+
+use serde::{Deserialize, Deserializer, Serialize};
+use url::Url;
+
+#[derive(Debug, Deserialize, Serialize, Clone, Eq, PartialEq, PartialOrd, Ord)]
+#[serde(from = "String")]
+#[serde(deny_unknown_fields)]
+pub enum Parent {
+    Root,
+    Id(String),
+}
+
+impl Parent {
+    #[must_use]
+    pub fn id(&self) -> Option<&str> {
+        if let Self::Id(id) = self {
+            Some(id)
+        } else {
+            None
+        }
+    }
+}
+
+impl From<String> for Parent {
+    fn from(value: String) -> Self {
+        if value == "root" {
+            Self::Root
+        } else {
+            Self::Id(value)
+        }
+    }
+}
+
+#[derive(Debug, Deserialize, Serialize, Clone, Eq, PartialEq, PartialOrd, Ord)]
+#[serde(from = "String")]
+#[serde(deny_unknown_fields)]
+pub struct Id {
+    pub id: String,
+}
+impl From<String> for Id {
+    fn from(value: String) -> Self {
+        Self {
+            // Take the last element if the string is split with dots, otherwise take the full id
+            id: value.split('.').last().unwrap_or(&value).to_owned(),
+        }
+    }
+}
+
+#[derive(Debug, Deserialize, Serialize, Clone, Eq, PartialEq, PartialOrd, Ord)]
+#[allow(clippy::struct_excessive_bools)]
+pub struct Comment {
+    pub id: Id,
+    pub text: String,
+    #[serde(default = "zero")]
+    pub like_count: u32,
+    pub is_pinned: bool,
+    pub author_id: String,
+    #[serde(default = "unknown")]
+    pub author: String,
+    pub author_is_verified: bool,
+    pub author_thumbnail: Url,
+    pub parent: Parent,
+    #[serde(deserialize_with = "edited_from_time_text", alias = "_time_text")]
+    pub edited: bool,
+    // Can't also be deserialized, as it's already used in 'edited'
+    // _time_text: String,
+    pub timestamp: i64,
+    pub author_url: Option<Url>,
+    pub author_is_uploader: bool,
+    pub is_favorited: bool,
+}
+
+fn unknown() -> String {
+    "<Unknown>".to_string()
+}
+fn zero() -> u32 {
+    0
+}
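+// There is no dedicated "edited" field; yt-dlp appends " (edited)" to the human-readable
+// `_time_text`, so the flag is derived from that suffix.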
+fn edited_from_time_text<'de, D>(d: D) -> Result<bool, D::Error>
+where
+    D: Deserializer<'de>,
+{
+    let s = String::deserialize(d)?;
+    if s.contains(" (edited)") {
+        Ok(true)
+    } else {
+        Ok(false)
+    }
+}
+
+#[derive(Debug, Clone)]
+#[allow(clippy::module_name_repetitions)]
+pub struct CommentExt {
+    pub value: Comment,
+    pub replies: Vec<CommentExt>,
+}
+
+#[derive(Debug, Default)]
+pub struct Comments {
+    pub(super) vec: Vec<CommentExt>,
+}
+
+impl Comments {
+    pub fn new() -> Self {
+        Self::default()
+    }
+    pub fn push(&mut self, value: CommentExt) {
+        self.vec.push(value);
+    }
+    pub fn get_mut(&mut self, key: &str) -> Option<&mut CommentExt> {
+        self.vec.iter_mut().filter(|c| c.value.id.id == key).last()
+    }
+    pub fn insert(&mut self, key: &str, value: CommentExt) {
+        let parent = self
+            .vec
+            .iter_mut()
+            .filter(|c| c.value.id.id == key)
+            .last()
+            .expect("One of these should exist");
+        parent.push_reply(value);
+    }
+}
+impl CommentExt {
+    pub fn push_reply(&mut self, value: CommentExt) {
+        self.replies.push(value);
+    }
+    pub fn get_mut_reply(&mut self, key: &str) -> Option<&mut CommentExt> {
+        self.replies
+            .iter_mut()
+            .filter(|c| c.value.id.id == key)
+            .last()
+    }
+}
+
+impl From<Comment> for CommentExt {
+    fn from(value: Comment) -> Self {
+        Self {
+            replies: vec![],
+            value,
+        }
+    }
+}
diff --git a/crates/yt/src/comments/description.rs b/crates/yt/src/comments/description.rs
new file mode 100644
index 0000000..878b573
--- /dev/null
+++ b/crates/yt/src/comments/description.rs
@@ -0,0 +1,46 @@
+// yt - A fully featured command line YouTube client
+//
+// Copyright (C) 2024 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+// Copyright (C) 2025 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+// SPDX-License-Identifier: GPL-3.0-or-later
+//
+// This file is part of Yt.
+//
+// You should have received a copy of the License along with this program.
+// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
+
+use crate::{
+    App,
+    comments::output::display_fmt_and_less,
+    storage::video_database::{Video, get},
+    unreachable::Unreachable,
+};
+
+use anyhow::{Result, bail};
+use yt_dlp::json_cast;
+
+pub async fn description(app: &App) -> Result<()> {
+    let description = get(app).await?;
+    display_fmt_and_less(description).await?;
+
+    Ok(())
+}
+
+pub async fn get(app: &App) -> Result<String> {
+    let currently_playing_video: Video =
+        if let Some(video) = get::currently_focused_video(app).await? {
+            video
+        } else {
+            bail!("Could not find a currently playing video!");
+        };
+
+    let info_json = get::video_info_json(&currently_playing_video)?.unreachable(
+        "A currently *playing* video must be cached. And thus the info.json should be available",
+    );
+
+    Ok(info_json
+        .get("description")
+        .map(|val| json_cast!(val, as_str))
+        .unwrap_or("<No description>")
+        .to_owned())
+}
diff --git a/crates/yt/src/comments/display.rs b/crates/yt/src/comments/display.rs
new file mode 100644
index 0000000..6166b2b
--- /dev/null
+++ b/crates/yt/src/comments/display.rs
@@ -0,0 +1,118 @@
+// yt - A fully featured command line YouTube client
+//
+// Copyright (C) 2024 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+// Copyright (C) 2025 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+// SPDX-License-Identifier: GPL-3.0-or-later
+//
+// This file is part of Yt.
+//
+// You should have received a copy of the License along with this program.
+// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
+
+use std::fmt::Write;
+
+use chrono::{Local, TimeZone};
+use chrono_humanize::{Accuracy, HumanTime, Tense};
+
+use crate::comments::comment::CommentExt;
+
+use super::comment::Comments;
+
+impl Comments {
+    pub fn render(&self, color: bool) -> String {
+        self.render_help(color).expect("This should never fail.")
+    }
+
+    fn render_help(&self, color: bool) -> Result<String, std::fmt::Error> {
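+        // Write an ANSI SGR escape sequence (e.g. "\x1b[35m"), but only when color output is
+        // enabled.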
+        macro_rules! c {
+            ($color_str:expr, $write:ident, $color:expr) => {
+                if $color {
+                    $write.write_str(concat!("\x1b[", $color_str, "m"))?
+                }
+            };
+        }
+
+        fn format(
+            comment: &CommentExt,
+            f: &mut String,
+            ident_count: u32,
+            color: bool,
+        ) -> std::fmt::Result {
+            let ident = &(0..ident_count).map(|_| " ").collect::<String>();
+            let value = &comment.value;
+
+            f.write_str(ident)?;
+
+            if value.author_is_uploader {
+                c!("91;1", f, color);
+            } else {
+                c!("35", f, color);
+            }
+
+            f.write_str(&value.author)?;
+            c!("0", f, color);
+            if value.edited || value.is_favorited {
+                f.write_str("[")?;
+                if value.edited {
+                    f.write_str("")?;
+                }
+                if value.edited && value.is_favorited {
+                    f.write_str(" ")?;
+                }
+                if value.is_favorited {
+                    f.write_str("")?;
+                }
+                f.write_str("]")?;
+            }
+
+            c!("36;1", f, color);
+            write!(
+                f,
+                " {}",
+                HumanTime::from(
+                    Local
+                        .timestamp_opt(value.timestamp, 0)
+                        .single()
+                        .expect("This should be valid")
+                )
+                .to_text_en(Accuracy::Rough, Tense::Past)
+            )?;
+            c!("0", f, color);
+
+            // c!("31;1", f);
+            // f.write_fmt(format_args!(" [{}]", comment.value.like_count))?;
+            // c!("0", f);
+
+            f.write_str(":\n")?;
+            f.write_str(ident)?;
+
+            f.write_str(&value.text.replace('\n', &format!("\n{ident}")))?;
+            f.write_str("\n")?;
+
+            if comment.replies.is_empty() {
+                f.write_str("\n")?;
+            } else {
+                let mut children = comment.replies.clone();
+                children.sort_by(|a, b| a.value.timestamp.cmp(&b.value.timestamp));
+
+                for child in children {
+                    format(&child, f, ident_count + 4, color)?;
+                }
+            }
+
+            Ok(())
+        }
+
+        let mut f = String::new();
+
+        if !&self.vec.is_empty() {
+            let mut children = self.vec.clone();
+            children.sort_by(|a, b| b.value.like_count.cmp(&a.value.like_count));
+
+            for child in children {
+                format(&child, &mut f, 0, color)?;
+            }
+        }
+        Ok(f)
+    }
+}
diff --git a/crates/yt/src/comments/mod.rs b/crates/yt/src/comments/mod.rs
new file mode 100644
index 0000000..54031a4
--- /dev/null
+++ b/crates/yt/src/comments/mod.rs
@@ -0,0 +1,167 @@
+// yt - A fully featured command line YouTube client
+//
+// Copyright (C) 2024 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+// Copyright (C) 2025 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+// SPDX-License-Identifier: GPL-3.0-or-later
+//
+// This file is part of Yt.
+//
+// You should have received a copy of the License along with this program.
+// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
+
+use std::mem;
+
+use anyhow::{Result, bail};
+use comment::{Comment, CommentExt, Comments, Parent};
+use output::display_fmt_and_less;
+use regex::Regex;
+use yt_dlp::json_cast;
+
+use crate::{
+    app::App,
+    storage::video_database::{Video, get},
+    unreachable::Unreachable,
+};
+
+mod comment;
+mod display;
+pub mod output;
+
+pub mod description;
+pub use description::*;
+
+#[allow(clippy::too_many_lines)]
+pub async fn get(app: &App) -> Result<Comments> {
+    let currently_playing_video: Video =
+        if let Some(video) = get::currently_focused_video(app).await? {
+            video
+        } else {
+            bail!("Could not find a currently playing video!");
+        };
+
+    let info_json = get::video_info_json(&currently_playing_video)?.unreachable(
+        "A currently *playing* video must be cached. And thus the info.json should be available",
+    );
+
+    let base_comments = if let Some(comments) = info_json.get("comments") {
+        json_cast!(comments, as_array)
+    } else {
+        bail!(
+            "The video ('{}') does not have comments!",
+            info_json
+                .get("title")
+                .map(|val| json_cast!(val, as_str))
+                .unwrap_or("<No Title>")
+        )
+    };
+
+    let mut comments = Comments::new();
+    for c in base_comments {
+        let c: Comment = serde_json::from_value(c.to_owned())?;
+        if let Parent::Id(id) = &c.parent {
+            comments.insert(&(id.clone()), CommentExt::from(c));
+        } else {
+            comments.push(CommentExt::from(c));
+        }
+    }
+
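+    // The replies of a top-level comment arrive as a flat list. Replies that answer another
+    // reply start with an "@author" mention (optionally wrapped in zero-width spaces), so that
+    // mention is used to attach each such reply to the most recent earlier reply by that author,
+    // searching a few nesting levels deep.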
+    comments.vec.iter_mut().for_each(|comment| {
+       let replies = mem::take(&mut comment.replies);
+       let mut output_replies: Vec<CommentExt>  = vec![];
+
+       let re = Regex::new(r"\u{200b}?(@[^\t\s]+)\u{200b}?").unreachable("This is hardcoded");
+       for reply in replies {
+           if let Some(replyee_match) = re.captures(&reply.value.text) {
+               let full_match = replyee_match.get(0).unreachable("This will always exist");
+               let text = reply.value.text[0..full_match.start()].to_owned()
+                   + &reply.value.text[full_match.end()..];
+               let text: &str = text.trim().trim_matches('\u{200b}');
+
+               let replyee = replyee_match.get(1).unreachable("This should also exist").as_str();
+
+               if let Some(parent) = output_replies
+                   .iter_mut()
+                   // .rev()
+                   .flat_map(|com| &mut com.replies)
+                   .flat_map(|com| &mut com.replies)
+                   .flat_map(|com| &mut com.replies)
+                   .filter(|com| com.value.author == replyee)
+                   .last()
+               {
+                   parent.replies.push(CommentExt::from(Comment {
+                       text: text.to_owned(),
+                       ..reply.value
+                   }));
+               } else if let Some(parent) = output_replies
+                   .iter_mut()
+                   // .rev()
+                   .flat_map(|com| &mut com.replies)
+                   .flat_map(|com| &mut com.replies)
+                   .filter(|com| com.value.author == replyee)
+                   .last()
+               {
+                   parent.replies.push(CommentExt::from(Comment {
+                       text: text.to_owned(),
+                       ..reply.value
+                   }));
+               } else if let Some(parent) = output_replies
+                   .iter_mut()
+                   // .rev()
+                   .flat_map(|com| &mut com.replies)
+                   .filter(|com| com.value.author == replyee)
+                   .last()
+               {
+                   parent.replies.push(CommentExt::from(Comment {
+                       text: text.to_owned(),
+                       ..reply.value
+                   }));
+               } else if let Some(parent) = output_replies.iter_mut()
+                   // .rev()
+                   .filter(|com| com.value.author == replyee)
+                   .last()
+               {
+                   parent.replies.push(CommentExt::from(Comment {
+                       text: text.to_owned(),
+                       ..reply.value
+                   }));
+               } else {
+                   eprintln!(
+                       "Failed to find a parent for ('{}') both directly and via replies! The reply text was:\n'{}'\n",
+                       replyee,
+                       reply.value.text
+                   );
+                   output_replies.push(reply);
+               }
+           } else {
+               output_replies.push(reply);
+           }
+       }
+       comment.replies = output_replies;
+    });
+
+    Ok(comments)
+}
+
+pub async fn comments(app: &App) -> Result<()> {
+    let comments = get(app).await?;
+
+    display_fmt_and_less(comments.render(true)).await?;
+
+    Ok(())
+}
+
+#[cfg(test)]
+mod test {
+    #[test]
+    fn test_string_replacement() {
+        let s = "A \n\nB\n\nC".to_owned();
+        assert_eq!("A \n  \n  B\n  \n  C", s.replace('\n', "\n  "));
+    }
+}
diff --git a/crates/yt/src/comments/output.rs b/crates/yt/src/comments/output.rs
new file mode 100644
index 0000000..cb3a9c4
--- /dev/null
+++ b/crates/yt/src/comments/output.rs
@@ -0,0 +1,53 @@
+// yt - A fully featured command line YouTube client
+//
+// Copyright (C) 2024 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+// Copyright (C) 2025 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+// SPDX-License-Identifier: GPL-3.0-or-later
+//
+// This file is part of Yt.
+//
+// You should have received a copy of the License along with this program.
+// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
+
+use std::{
+    io::Write,
+    process::{Command, Stdio},
+};
+
+use anyhow::{Context, Result};
+use uu_fmt::{FmtOptions, process_text};
+
+use crate::unreachable::Unreachable;
+
+pub async fn display_fmt_and_less(input: String) -> Result<()> {
+    let mut less = Command::new("less")
+        .args(["--raw-control-chars"])
+        .stdin(Stdio::piped())
+        .stderr(Stdio::inherit())
+        .spawn()
+        .context("Failed to run less")?;
+
+    let input = format_text(&input);
+    let mut stdin = less.stdin.take().context("Failed to open stdin")?;
+    std::thread::spawn(move || {
+        stdin
+            .write_all(input.as_bytes())
+            .unreachable("Should be able to write to the stdin of less");
+    });
+
+    let _ = less.wait().context("Failed to await less")?;
+
+    Ok(())
+}
+
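+/// Re-wrap `input` to the current terminal width (falling back to 90 columns), using uutils'
+/// `fmt` in split-only mode so short lines are not re-joined.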
+#[must_use]
+pub fn format_text(input: &str) -> String {
+    let width = termsize::get().map_or(90, |size| size.cols);
+    let fmt_opts = FmtOptions {
+        uniform: true,
+        split_only: true,
+        ..FmtOptions::new(Some(width as usize), None, Some(4))
+    };
+
+    process_text(input, &fmt_opts)
+}
diff --git a/crates/yt/src/config/default.rs b/crates/yt/src/config/default.rs
new file mode 100644
index 0000000..4ed643b
--- /dev/null
+++ b/crates/yt/src/config/default.rs
@@ -0,0 +1,110 @@
+// yt - A fully featured command line YouTube client
+//
+// Copyright (C) 2024 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+// Copyright (C) 2025 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+// SPDX-License-Identifier: GPL-3.0-or-later
+//
+// This file is part of Yt.
+//
+// You should have received a copy of the License along with this program.
+// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
+
+use std::path::PathBuf;
+
+use anyhow::{Context, Result};
+
+fn get_runtime_path(name: &'static str) -> Result<PathBuf> {
+    let xdg_dirs = xdg::BaseDirectories::with_prefix(PREFIX);
+    xdg_dirs
+        .place_runtime_file(name)
+        .with_context(|| format!("Failed to place runtime file: '{name}'"))
+}
+fn get_data_path(name: &'static str) -> Result<PathBuf> {
+    let xdg_dirs = xdg::BaseDirectories::with_prefix(PREFIX);
+    xdg_dirs
+        .place_data_file(name)
+        .with_context(|| format!("Failed to place data file: '{name}'"))
+}
+fn get_config_path(name: &'static str) -> Result<PathBuf> {
+    let xdg_dirs = xdg::BaseDirectories::with_prefix(PREFIX);
+    xdg_dirs
+        .place_config_file(name)
+        .with_context(|| format!("Failed to place config file: '{name}'"))
+}
+
+pub(super) fn create_path(path: PathBuf) -> Result<PathBuf> {
+    if !path.exists() {
+        if let Some(parent) = path.parent() {
+            std::fs::create_dir_all(parent)
+                .with_context(|| format!("Failed to create the '{}' directory", path.display()))?;
+        }
+    }
+
+    Ok(path)
+}
+
+pub(crate) const PREFIX: &str = "yt";
+
+pub(crate) mod global {
+    pub(crate) fn display_colors() -> bool {
+        // TODO: This should probably check if the output is a tty and otherwise return `false` <2025-02-14>
+        true
+    }
+}
+
+pub(crate) mod select {
+    pub(crate) fn playback_speed() -> f64 {
+        2.7
+    }
+    pub(crate) fn subtitle_langs() -> &'static str {
+        ""
+    }
+}
+
+pub(crate) mod watch {
+    pub(crate) fn local_displays_length() -> usize {
+        1000
+    }
+}
+
+pub(crate) mod update {
+    pub(crate) fn max_backlog() -> usize {
+        20
+    }
+}
+
+pub(crate) mod paths {
+    use std::{env::temp_dir, path::PathBuf};
+
+    use anyhow::Result;
+
+    use super::{PREFIX, create_path, get_config_path, get_data_path, get_runtime_path};
+
+    // We download to the temp dir to avoid taxing the disk
+    pub(crate) fn download_dir() -> Result<PathBuf> {
+        let temp_dir = temp_dir();
+
+        create_path(temp_dir.join(PREFIX))
+    }
+    pub(crate) fn mpv_config_path() -> Result<PathBuf> {
+        get_config_path("mpv.conf")
+    }
+    pub(crate) fn mpv_input_path() -> Result<PathBuf> {
+        get_config_path("mpv.input.conf")
+    }
+    pub(crate) fn database_path() -> Result<PathBuf> {
+        get_data_path("videos.sqlite")
+    }
+    pub(crate) fn config_path() -> Result<PathBuf> {
+        get_config_path("config.toml")
+    }
+    pub(crate) fn last_selection_path() -> Result<PathBuf> {
+        get_runtime_path("selected.yts")
+    }
+}
+
+pub(crate) mod download {
+    pub(crate) fn max_cache_size() -> &'static str {
+        "3 GiB"
+    }
+}
diff --git a/crates/yt/src/config/definitions.rs b/crates/yt/src/config/definitions.rs
new file mode 100644
index 0000000..ce8c0d4
--- /dev/null
+++ b/crates/yt/src/config/definitions.rs
@@ -0,0 +1,67 @@
+// yt - A fully featured command line YouTube client
+//
+// Copyright (C) 2024 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+// Copyright (C) 2025 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+// SPDX-License-Identifier: GPL-3.0-or-later
+//
+// This file is part of Yt.
+//
+// You should have received a copy of the License along with this program.
+// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
+
+use std::path::PathBuf;
+
+use serde::Deserialize;
+
+#[derive(Debug, Deserialize, PartialEq)]
+#[serde(deny_unknown_fields)]
+pub(crate) struct ConfigFile {
+    pub global: Option<GlobalConfig>,
+    pub select: Option<SelectConfig>,
+    pub watch: Option<WatchConfig>,
+    pub paths: Option<PathsConfig>,
+    pub download: Option<DownloadConfig>,
+    pub update: Option<UpdateConfig>,
+}
+
+#[derive(Debug, Deserialize, PartialEq, Clone, Copy)]
+#[serde(deny_unknown_fields)]
+pub(crate) struct GlobalConfig {
+    pub display_colors: Option<bool>,
+}
+
+#[derive(Debug, Deserialize, PartialEq, Clone, Copy)]
+#[serde(deny_unknown_fields)]
+pub(crate) struct UpdateConfig {
+    pub max_backlog: Option<usize>,
+}
+
+#[derive(Debug, Deserialize, PartialEq, Clone)]
+#[serde(deny_unknown_fields)]
+pub(crate) struct DownloadConfig {
+    /// This will then be converted to a u64
+    pub max_cache_size: Option<String>,
+}
+
+#[derive(Debug, Deserialize, PartialEq, Clone)]
+#[serde(deny_unknown_fields)]
+pub(crate) struct SelectConfig {
+    pub playback_speed: Option<f64>,
+    pub subtitle_langs: Option<String>,
+}
+
+#[derive(Debug, Deserialize, PartialEq, Clone, Copy)]
+#[serde(deny_unknown_fields)]
+pub(crate) struct WatchConfig {
+    pub local_displays_length: Option<usize>,
+}
+
+#[derive(Debug, Deserialize, PartialEq, Clone)]
+#[serde(deny_unknown_fields)]
+pub(crate) struct PathsConfig {
+    pub download_dir: Option<PathBuf>,
+    pub mpv_config_path: Option<PathBuf>,
+    pub mpv_input_path: Option<PathBuf>,
+    pub database_path: Option<PathBuf>,
+    pub last_selection_path: Option<PathBuf>,
+}
diff --git a/crates/yt/src/config/file_system.rs b/crates/yt/src/config/file_system.rs
new file mode 100644
index 0000000..2463e9d
--- /dev/null
+++ b/crates/yt/src/config/file_system.rs
@@ -0,0 +1,120 @@
+// yt - A fully featured command line YouTube client
+//
+// Copyright (C) 2024 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+// Copyright (C) 2025 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+// SPDX-License-Identifier: GPL-3.0-or-later
+//
+// This file is part of Yt.
+//
+// You should have received a copy of the License along with this program.
+// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
+
+use crate::config::{DownloadConfig, PathsConfig, SelectConfig, WatchConfig};
+
+use super::{
+    Config, GlobalConfig, UpdateConfig,
+    default::{create_path, download, global, paths, select, update, watch},
+};
+
+use std::{fs::read_to_string, path::PathBuf};
+
+use anyhow::{Context, Result};
+use bytes::Bytes;
+
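+// Resolves a single configuration value: walk the (all-optional) sections of the parsed config
+// file and fall back to the matching function in `default` when the key is unset. The `@path`
+// and `@path_if_none` variants additionally run the result through `create_path`, ensuring the
+// parent directory exists.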
+macro_rules! get {
+    ($default:path, $config:expr, $key_one:ident, $($keys:ident),*) => {
+        {
+            let maybe_value = get!{@option $config, $key_one, $($keys),*};
+            if let Some(value) = maybe_value {
+                value
+            } else {
+                $default().to_owned()
+            }
+        }
+    };
+
+    (@option $config:expr, $key_one:ident, $($keys:ident),*) => {
+        if let Some(key) = $config.$key_one.clone() {
+            get!{@option key, $($keys),*}
+        } else {
+            None
+        }
+    };
+    (@option $config:expr, $key_one:ident) => {
+        $config.$key_one
+    };
+
+    (@path_if_none $config:expr, $option_default:expr, $default:path, $key_one:ident, $($keys:ident),*) => {
+        {
+            let maybe_download_dir: Option<PathBuf> =
+                get! {@option $config, $key_one, $($keys),*};
+
+            let down_dir = if let Some(dir) = maybe_download_dir {
+                PathBuf::from(dir)
+            } else {
+                if let Some(path) = $option_default {
+                    path
+                } else {
+                    $default()
+                        .with_context(|| format!("Failed to get default path for: '{}.{}'", stringify!($key_one), stringify!($($keys),*)))?
+                }
+            };
+            create_path(down_dir)?
+        }
+    };
+    (@path $config:expr, $default:path, $key_one:ident, $($keys:ident),*) => {
+        get! {@path_if_none $config, None, $default, $key_one, $($keys),*}
+    };
+}
+
+impl Config {
+    pub fn from_config_file(
+        db_path: Option<PathBuf>,
+        config_path: Option<PathBuf>,
+        display_colors: Option<bool>,
+    ) -> Result<Self> {
+        let config_file_path =
+            config_path.map_or_else(|| -> Result<_> { paths::config_path() }, Ok)?;
+
+        let config: super::definitions::ConfigFile =
+            toml::from_str(&read_to_string(config_file_path).unwrap_or(String::new()))
+                .context("Failed to parse the config file as toml")?;
+
+        Ok(Self {
+            global: GlobalConfig {
+                display_colors: {
+                    let config_value: Option<bool> = get! {@option config, global, display_colors};
+
+                    display_colors.unwrap_or(config_value.unwrap_or_else(global::display_colors))
+                },
+            },
+            select: SelectConfig {
+                playback_speed: get! {select::playback_speed, config, select, playback_speed},
+                subtitle_langs: get! {select::subtitle_langs, config, select, subtitle_langs},
+            },
+            watch: WatchConfig {
+                local_displays_length: get! {watch::local_displays_length, config, watch, local_displays_length},
+            },
+            update: UpdateConfig {
+                max_backlog: get! {update::max_backlog, config, update, max_backlog},
+            },
+            paths: PathsConfig {
+                download_dir: get! {@path config, paths::download_dir, paths, download_dir},
+                mpv_config_path: get! {@path config, paths::mpv_config_path, paths, mpv_config_path},
+                mpv_input_path: get! {@path config, paths::mpv_input_path, paths, mpv_input_path},
+                database_path: get! {@path_if_none config, db_path, paths::database_path, paths, database_path},
+                last_selection_path: get! {@path config, paths::last_selection_path, paths, last_selection_path},
+            },
+            download: DownloadConfig {
+                max_cache_size: {
+                    let bytes_str: String =
+                        get! {download::max_cache_size, config, download, max_cache_size};
+                    let number: Bytes = bytes_str
+                        .parse()
+                        .context("Failed to parse max_cache_size")?;
+                    number
+                },
+            },
+        })
+    }
+}
diff --git a/crates/yt/src/config/mod.rs b/crates/yt/src/config/mod.rs
new file mode 100644
index 0000000..a10f7c2
--- /dev/null
+++ b/crates/yt/src/config/mod.rs
@@ -0,0 +1,76 @@
+// yt - A fully featured command line YouTube client
+//
+// Copyright (C) 2024 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+// Copyright (C) 2025 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+// SPDX-License-Identifier: GPL-3.0-or-later
+//
+// This file is part of Yt.
+//
+// You should have received a copy of the License along with this program.
+// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
+
+#![allow(clippy::module_name_repetitions)]
+
+use std::path::PathBuf;
+
+use bytes::Bytes;
+use serde::Serialize;
+
+mod default;
+mod definitions;
+pub mod file_system;
+
+#[derive(Serialize, Debug)]
+pub struct Config {
+    pub global: GlobalConfig,
+    pub select: SelectConfig,
+    pub watch: WatchConfig,
+    pub paths: PathsConfig,
+    pub download: DownloadConfig,
+    pub update: UpdateConfig,
+}
+// These structures could get non-copy fields in the future.
+
+#[derive(Serialize, Debug)]
+#[allow(missing_copy_implementations)]
+pub struct GlobalConfig {
+    pub display_colors: bool,
+}
+#[derive(Serialize, Debug)]
+#[allow(missing_copy_implementations)]
+pub struct UpdateConfig {
+    pub max_backlog: usize,
+}
+#[derive(Serialize, Debug)]
+#[allow(missing_copy_implementations)]
+pub struct DownloadConfig {
+    pub max_cache_size: Bytes,
+}
+#[derive(Serialize, Debug)]
+pub struct SelectConfig {
+    pub playback_speed: f64,
+    pub subtitle_langs: String,
+}
+#[derive(Serialize, Debug)]
+#[allow(missing_copy_implementations)]
+pub struct WatchConfig {
+    pub local_displays_length: usize,
+}
+#[derive(Serialize, Debug)]
+pub struct PathsConfig {
+    pub download_dir: PathBuf,
+    pub mpv_config_path: PathBuf,
+    pub mpv_input_path: PathBuf,
+    pub database_path: PathBuf,
+    pub last_selection_path: PathBuf,
+}
+
+// pub fn status_path() -> anyhow::Result<PathBuf> {
+//     const STATUS_PATH: &str = "running.info.json";
+//     get_runtime_path(STATUS_PATH)
+// }
+
+// pub fn subscriptions() -> anyhow::Result<PathBuf> {
+//     const SUBSCRIPTIONS: &str = "subscriptions.json";
+//     get_data_path(SUBSCRIPTIONS)
+// }
diff --git a/crates/yt/src/constants.rs b/crates/yt/src/constants.rs
new file mode 100644
index 0000000..0f5b918
--- /dev/null
+++ b/crates/yt/src/constants.rs
@@ -0,0 +1,12 @@
+// yt - A fully featured command line YouTube client
+//
+// Copyright (C) 2024 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+// Copyright (C) 2025 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+// SPDX-License-Identifier: GPL-3.0-or-later
+//
+// This file is part of Yt.
+//
+// You should have received a copy of the License along with this program.
+// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
+
+pub const HELP_STR: &str = include_str!("./select/selection_file/help.str");
diff --git a/crates/yt/src/download/download_options.rs b/crates/yt/src/download/download_options.rs
new file mode 100644
index 0000000..558adfd
--- /dev/null
+++ b/crates/yt/src/download/download_options.rs
@@ -0,0 +1,118 @@
+// yt - A fully featured command line YouTube client
+//
+// Copyright (C) 2024 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+// Copyright (C) 2025 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+// SPDX-License-Identifier: GPL-3.0-or-later
+//
+// This file is part of Yt.
+//
+// You should have received a copy of the License along with this program.
+// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
+
+use anyhow::Context;
+use serde_json::{Value, json};
+use yt_dlp::{YoutubeDL, options::YoutubeDLOptions};
+
+use crate::{app::App, storage::video_database::YtDlpOptions};
+
+use super::progress_hook::wrapped_progress_hook;
+
+pub fn download_opts(app: &App, additional_opts: &YtDlpOptions) -> anyhow::Result<YoutubeDL> {
+    YoutubeDLOptions::new()
+        .with_progress_hook(wrapped_progress_hook)
+        .set("extract_flat", "in_playlist")
+        .set(
+            "extractor_args",
+            json! {
+            {
+                "youtube": {
+                    "comment_sort": [ "top" ],
+                    "max_comments": [ "150", "all", "100" ]
+                }
+            }
+            },
+        )
+        //.set("cookiesfrombrowser", json! {("firefox", "me.google", None::<String>, "youtube_dlp")})
+        .set("prefer_free_formats", true)
+        .set("ffmpeg_location", env!("FFMPEG_LOCATION"))
+        .set("format", "bestvideo[height<=?1080]+bestaudio/best")
+        .set("fragment_retries", 10)
+        .set("getcomments", true)
+        .set("ignoreerrors", false)
+        .set("retries", 10)
+        .set("writeinfojson", true)
+        // NOTE: This results in a constant warning message.  <2025-01-04>
+        //.set("writeannotations", true)
+        .set("writesubtitles", true)
+        .set("writeautomaticsub", true)
+        .set(
+            "outtmpl",
+            json! {
+            {
+                "default": app.config.paths.download_dir.join("%(channel)s/%(title)s.%(ext)s"),
+                "chapter": "%(title)s - %(section_number)03d %(section_title)s [%(id)s].%(ext)s"
+            }
+            },
+        )
+        .set("compat_opts", json! {{}})
+        .set("forceprint", json! {{}})
+        .set("print_to_file", json! {{}})
+        .set("windowsfilenames", false)
+        .set("restrictfilenames", false)
+        .set("trim_file_names", false)
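+        // SponsorBlock marks the configured categories as chapters, ModifyChapters then cuts the
+        // "sponsor" segments out of the file, and FFmpegMetadata/FFmpegConcat handle chapter
+        // metadata and multi-video playlists.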
+        .set(
+            "postprocessors",
+            json! {
+            [
+                {
+                    "api": "https://sponsor.ajay.app",
+                    "categories": [
+                        "interaction",
+                        "intro",
+                        "music_offtopic",
+                        "sponsor",
+                        "outro",
+                        "poi_highlight",
+                        "preview",
+                        "selfpromo",
+                        "filler",
+                        "chapter"
+                    ],
+                    "key": "SponsorBlock",
+                    "when": "after_filter"
+                },
+                {
+                    "force_keyframes": false,
+                    "key": "ModifyChapters",
+                    "remove_chapters_patterns": [],
+                    "remove_ranges": [],
+                    "remove_sponsor_segments": [ "sponsor" ],
+                    "sponsorblock_chapter_title": "[SponsorBlock]: %(category_names)l"
+                },
+                {
+                    "add_chapters": true,
+                    "add_infojson": null,
+                    "add_metadata": false,
+                    "key": "FFmpegMetadata"
+                },
+                {
+                    "key": "FFmpegConcat",
+                    "only_multi_video": true,
+                    "when": "playlist"
+                }
+            ]
+            },
+        )
+        .set(
+            "subtitleslangs",
+            Value::Array(
+                additional_opts
+                    .subtitle_langs
+                    .split(',')
+                    .map(|val| Value::String(val.to_owned()))
+                    .collect::<Vec<_>>(),
+            ),
+        )
+        .build()
+        .context("Failed to instantiate download yt_dlp")
+}
diff --git a/crates/yt/src/download/mod.rs b/crates/yt/src/download/mod.rs
new file mode 100644
index 0000000..6065cf9
--- /dev/null
+++ b/crates/yt/src/download/mod.rs
@@ -0,0 +1,369 @@
+// yt - A fully featured command line YouTube client
+//
+// Copyright (C) 2024 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+// Copyright (C) 2025 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+// SPDX-License-Identifier: GPL-3.0-or-later
+//
+// This file is part of Yt.
+//
+// You should have received a copy of the License along with this program.
+// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
+
+use std::{collections::HashMap, io, str::FromStr, sync::Arc, time::Duration};
+
+use crate::{
+    app::App,
+    download::download_options::download_opts,
+    storage::video_database::{
+        Video, YtDlpOptions,
+        downloader::{get_next_uncached_video, set_video_cache_path},
+        extractor_hash::ExtractorHash,
+        get::get_video_yt_dlp_opts,
+        notify::wait_for_cache_reduction,
+    },
+    unreachable::Unreachable,
+};
+
+use anyhow::{Context, Result, bail};
+use bytes::Bytes;
+use futures::{FutureExt, future::BoxFuture};
+use log::{debug, error, info, warn};
+use tokio::{fs, task::JoinHandle, time};
+use yt_dlp::{json_cast, json_get};
+
+#[allow(clippy::module_name_repetitions)]
+pub mod download_options;
+pub mod progress_hook;
+
+#[derive(Debug)]
+#[allow(clippy::module_name_repetitions)]
+pub struct CurrentDownload {
+    task_handle: JoinHandle<Result<()>>,
+    extractor_hash: ExtractorHash,
+}
+
+impl CurrentDownload {
+    fn new_from_video(app: Arc<App>, video: Video) -> Self {
+        let extractor_hash = video.extractor_hash;
+
+        let task_handle = tokio::spawn(async move {
+            Downloader::actually_cache_video(&app, &video)
+                .await
+                .with_context(|| format!("Failed to cache video: '{}'", video.title))?;
+            Ok(())
+        });
+
+        Self {
+            task_handle,
+            extractor_hash,
+        }
+    }
+}
+
+enum CacheSizeCheck {
+    /// The video can be downloaded
+    Fits,
+
+    /// The video and the current cache size together would exceed the size
+    TooLarge,
+
+    /// The video would not even fit into the empty cache
+    ExceedsMaxCacheSize,
+}
+
+#[derive(Debug)]
+pub struct Downloader {
+    current_download: Option<CurrentDownload>,
+    video_size_cache: HashMap<ExtractorHash, u64>,
+    printed_warning: bool,
+    cached_cache_allocation: Option<Bytes>,
+}
+
+impl Default for Downloader {
+    fn default() -> Self {
+        Self::new()
+    }
+}
+
+impl Downloader {
+    #[must_use]
+    pub fn new() -> Self {
+        Self {
+            current_download: None,
+            video_size_cache: HashMap::new(),
+            printed_warning: false,
+            cached_cache_allocation: None,
+        }
+    }
+
+    /// Check if enough cache space is available. Waits for a cache size reduction if it's not.
+    async fn is_enough_cache_available(
+        &mut self,
+        app: &App,
+        max_cache_size: u64,
+        next_video: &Video,
+    ) -> Result<CacheSizeCheck> {
+        if let Some(cdownload) = &self.current_download {
+            if cdownload.extractor_hash == next_video.extractor_hash {
+                // If the video is already being downloaded it will always fit. Otherwise the
+                // download would not have been started.
+                return Ok(CacheSizeCheck::Fits);
+            }
+        }
+        let cache_allocation = Self::get_current_cache_allocation(app).await?;
+        let video_size = self.get_approx_video_size(app, next_video)?;
+
+        if video_size >= max_cache_size {
+            error!(
+                "The video '{}' ({}) exceeds the maximum cache size ({})! \
+                 Please set a bigger maximum (`--max-cache-size`) or skip it.",
+                next_video.title,
+                Bytes::new(video_size),
+                Bytes::new(max_cache_size)
+            );
+
+            return Ok(CacheSizeCheck::ExceedsMaxCacheSize);
+        }
+
+        if cache_allocation.as_u64() + video_size >= max_cache_size {
+            if !self.printed_warning {
+                warn!(
+                    "Can't download video: '{}' ({}) as it's too large for the cache ({} of {} allocated). \
+                     Waiting for cache size reduction..",
+                    next_video.title,
+                    Bytes::new(video_size),
+                    &cache_allocation,
+                    Bytes::new(max_cache_size)
+                );
+                self.printed_warning = true;
+
+                // Update this value immediately.
+                // This avoids printing the "Current cache size has changed .." warning below.
+                self.cached_cache_allocation = Some(cache_allocation);
+            }
+
+            if let Some(cca) = self.cached_cache_allocation {
+                if cca != cache_allocation {
+                    // Only print the warning if the display string has actually changed.
+                    // Otherwise, we might confuse the user
+                    if cca.to_string() != cache_allocation.to_string() {
+                        warn!(
+                            "Current cache size has changed, it's now: '{}'",
+                            cache_allocation
+                        );
+                    }
+                    debug!(
+                        "Cache size has changed: {} -> {}",
+                        cca.as_u64(),
+                        cache_allocation.as_u64()
+                    );
+                    self.cached_cache_allocation = Some(cache_allocation);
+                }
+            } else {
+                unreachable!(
+                    "The `printed_warning` should be false in this case, \
+                    and thus should have already set the `cached_cache_allocation`."
+                );
+            }
+
+            // Wait and hope, that a large video is deleted from the cache.
+            wait_for_cache_reduction(app).await?;
+            Ok(CacheSizeCheck::TooLarge)
+        } else {
+            self.printed_warning = false;
+            Ok(CacheSizeCheck::Fits)
+        }
+    }
+
+    /// The entry point to the Downloader.
+    /// This Downloader will periodically check if the database has changed, and then also
+    /// change which videos it downloads.
+    /// This will run, until the database doesn't contain any watchable videos
+    pub async fn consume(&mut self, app: Arc<App>, max_cache_size: u64) -> Result<()> {
+        while let Some(next_video) = get_next_uncached_video(&app).await? {
+            match self
+                .is_enough_cache_available(&app, max_cache_size, &next_video)
+                .await?
+            {
+                CacheSizeCheck::Fits => (),
+                CacheSizeCheck::TooLarge => continue,
+                CacheSizeCheck::ExceedsMaxCacheSize => bail!("Giving up."),
+            };
+
+            if self.current_download.is_some() {
+                let current_download = self.current_download.take().unreachable("It is `Some`.");
+
+                if current_download.task_handle.is_finished() {
+                    current_download.task_handle.await??;
+                    continue;
+                }
+
+                if next_video.extractor_hash == current_download.extractor_hash {
+                    // Reset the taken value
+                    self.current_download = Some(current_download);
+                } else {
+                    info!(
+                        "Noticed that the next video is not the one being downloaded; replacing it ('{}' vs. '{}')!",
+                        next_video.extractor_hash.into_short_hash(&app).await?,
+                        current_download
+                            .extractor_hash
+                            .into_short_hash(&app)
+                            .await?
+                    );
+
+                    // Replace the currently downloading video
+                    // FIXME(@bpeetz): This does not work (probably because of the python part.) <2025-02-21>
+                    current_download.task_handle.abort();
+
+                    let new_current_download =
+                        CurrentDownload::new_from_video(Arc::clone(&app), next_video);
+
+                    self.current_download = Some(new_current_download);
+                }
+            } else {
+                info!(
+                    "No video is being downloaded right now, setting it to '{}'",
+                    next_video.title
+                );
+                let new_current_download =
+                    CurrentDownload::new_from_video(Arc::clone(&app), next_video);
+                self.current_download = Some(new_current_download);
+            }
+
+            // TODO(@bpeetz): Why do we sleep here? <2025-02-21>
+            time::sleep(Duration::from_secs(1)).await;
+        }
+
+        info!("Finished downloading!");
+        Ok(())
+    }
+
+    pub async fn get_current_cache_allocation(app: &App) -> Result<Bytes> {
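+        // Recursing in an async fn requires boxing the future; this walks the download directory
+        // and sums the sizes of all files in it.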
+        fn dir_size(mut dir: fs::ReadDir) -> BoxFuture<'static, Result<Bytes>> {
+            async move {
+                let mut acc = 0;
+                while let Some(entry) = dir.next_entry().await? {
+                    let size = match entry.metadata().await? {
+                        data if data.is_dir() => {
+                            let path = entry.path();
+                            let read_dir = fs::read_dir(path).await?;
+
+                            dir_size(read_dir).await?.as_u64()
+                        }
+                        data => data.len(),
+                    };
+                    acc += size;
+                }
+                Ok(Bytes::new(acc))
+            }
+            .boxed()
+        }
+
+        let read_dir_result = match fs::read_dir(&app.config.paths.download_dir).await {
+            Ok(ok) => ok,
+            Err(err) => match err.kind() {
+                io::ErrorKind::NotFound => {
+                    fs::create_dir_all(&app.config.paths.download_dir)
+                        .await
+                        .with_context(|| {
+                            format!(
+                                "Failed to create download dir at: '{}'",
+                                &app.config.paths.download_dir.display()
+                            )
+                        })?;
+
+                    info!(
+                        "Created empty download dir at '{}'",
+                        &app.config.paths.download_dir.display(),
+                    );
+
+                    // The new dir should not contain anything (otherwise we would not have had to
+                    // create it)
+                    return Ok(Bytes::new(0));
+                }
+                err => Err(io::Error::from(err)).with_context(|| {
+                    format!(
+                        "Failed to get dir size of download dir at: '{}'",
+                        &app.config.paths.download_dir.display()
+                    )
+                })?,
+            },
+        };
+
+        dir_size(read_dir_result).await
+    }
+
+    fn get_approx_video_size(&mut self, app: &App, video: &Video) -> Result<u64> {
+        if let Some(value) = self.video_size_cache.get(&video.extractor_hash) {
+            Ok(*value)
+        } else {
+            // the subtitle file size should be negligible
+            let add_opts = YtDlpOptions {
+                subtitle_langs: String::new(),
+            };
+            let yt_dlp = download_opts(app, &add_opts)?;
+
+            let result = yt_dlp
+                .extract_info(&video.url, false, true)
+                .with_context(|| {
+                    format!("Failed to extract video information: '{}'", video.title)
+                })?;
+
+            let size = if let Some(val) = result.get("filesize") {
+                json_cast!(val, as_u64)
+            } else if let Some(serde_json::Value::Number(num)) = result.get("filesize_approx") {
+                // NOTE(@bpeetz): yt_dlp sets this value to `Null`, instead of omitting it when it
+                // can't calculate the approximate filesize.
+                // Thus, we have to check that it is actually non-null before we cast it. <2025-06-15>
+                json_cast!(num, as_u64)
+            } else if result.get("duration").is_some() && result.get("tbr").is_some() {
+                #[allow(clippy::cast_sign_loss, clippy::cast_possible_truncation)]
+                let duration = json_get!(result, "duration", as_f64).ceil() as u64;
+
+                // TODO: yt_dlp gets this from the format
+                #[allow(clippy::cast_sign_loss, clippy::cast_possible_truncation)]
+                let tbr = json_get!(result, "tbr", as_f64).ceil() as u64;
+
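+                // `tbr` is yt-dlp's average total bitrate in KBit/s, so duration (s) * tbr *
+                // 1000 / 8 approximates the size in bytes.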
+                duration * tbr * (1000 / 8)
+            } else {
+                let hardcoded_default = Bytes::from_str("250 MiB").expect("This is hardcoded");
+                error!(
+                    "Failed to find a filesize for video: '{}' (Using hardcoded value of {})",
+                    video.title, hardcoded_default
+                );
+                hardcoded_default.as_u64()
+            };
+
+            assert_eq!(
+                self.video_size_cache.insert(video.extractor_hash, size),
+                None
+            );
+
+            Ok(size)
+        }
+    }
+
+    async fn actually_cache_video(app: &App, video: &Video) -> Result<()> {
+        debug!("Download started: {}", &video.title);
+
+        let additional_opts = get_video_yt_dlp_opts(app, &video.extractor_hash).await?;
+        let yt_dlp = download_opts(app, &additional_opts)?;
+
+        let result = yt_dlp
+            .download(&[video.url.clone()])
+            .with_context(|| format!("Failed to download video: '{}'", video.title))?;
+
+        assert_eq!(result.len(), 1);
+        let result = &result[0];
+
+        set_video_cache_path(app, &video.extractor_hash, Some(result)).await?;
+
+        info!(
+            "Video '{}' was downloaded to path: {}",
+            video.title,
+            result.display()
+        );
+
+        Ok(())
+    }
+}
diff --git a/crates/yt/src/download/progress_hook.rs b/crates/yt/src/download/progress_hook.rs
new file mode 100644
index 0000000..ad754b0
--- /dev/null
+++ b/crates/yt/src/download/progress_hook.rs
@@ -0,0 +1,198 @@
+// yt - A fully featured command line YouTube client
+//
+// Copyright (C) 2025 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+// SPDX-License-Identifier: GPL-3.0-or-later
+//
+// This file is part of Yt.
+//
+// You should have received a copy of the License along with this program.
+// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
+
+use std::{
+    io::{Write, stderr},
+    process,
+};
+
+use bytes::Bytes;
+use log::{Level, log_enabled};
+use yt_dlp::mk_python_function;
+
+use crate::{
+    ansi_escape_codes::{clear_whole_line, move_to_col},
+    select::selection_file::duration::MaybeDuration,
+};
+
+/// # Panics
+/// If expectations fail.
+#[allow(clippy::too_many_lines, clippy::needless_pass_by_value)]
+pub fn progress_hook(
+    input: serde_json::Map<String, serde_json::Value>,
+) -> Result<(), std::io::Error> {
+    // Skip the progress display if debug logging is enabled, as it would otherwise cover the
+    // debug messages.
+    if log_enabled!(Level::Debug) {
+        return Ok(());
+    }
+
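+    // `get!` extracts a typed field from the (possibly nested) progress dictionary and panics if
+    // it is missing or of the wrong type; `default_get!` below falls back to a caller-supplied
+    // default instead.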
+    macro_rules! get {
+        (@interrogate $item:ident, $type_fun:ident, $get_fun:ident, $name:expr) => {{
+            let a = $item.get($name).expect(concat!(
+                "The field '",
+                stringify!($name),
+                "' should exist."
+            ));
+
+            if a.$type_fun() {
+                a.$get_fun().expect(
+                    "This should have been checked in the if guard, so unpacking here is fine",
+                )
+            } else {
+                panic!(
+                    "Value {} => \n{}\n is not of type: {}",
+                    $name,
+                    a,
+                    stringify!($type_fun)
+                );
+            }
+        }};
+
+        ($type_fun:ident, $get_fun:ident, $name1:expr, $name2:expr) => {{
+            let a = get! {@interrogate input, is_object, as_object, $name1};
+            let b = get! {@interrogate a, $type_fun, $get_fun, $name2};
+            b
+        }};
+
+        ($type_fun:ident, $get_fun:ident, $name:expr) => {{
+            get! {@interrogate input, $type_fun, $get_fun, $name}
+        }};
+    }
+
+    macro_rules! default_get {
+        (@interrogate $item:ident, $default:expr, $get_fun:ident, $name:expr) => {{
+            let a = if let Some(field) = $item.get($name) {
+                field.$get_fun().unwrap_or($default)
+            } else {
+                $default
+            };
+            a
+        }};
+
+        ($get_fun:ident, $default:expr, $name1:expr, $name2:expr) => {{
+            let a = get! {@interrogate input, is_object, as_object, $name1};
+            let b = default_get! {@interrogate a, $default, $get_fun, $name2};
+            b
+        }};
+
+        ($get_fun:ident, $default:expr, $name:expr) => {{
+            default_get! {@interrogate input, $default, $get_fun, $name}
+        }};
+    }
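+    // A rough illustration of how these macros behave (using fields that appear further below):
+    // `get! {is_string, as_str, "status"}` panics if the field is missing or has the wrong type,
+    // while `default_get! {as_f64, 0.0, "eta"}` silently falls back to the given default.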
+
+    macro_rules! c {
+        ($color:expr, $format:expr) => {
+            format!("\x1b[{}m{}\x1b[0m", $color, $format)
+        };
+    }
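+    // E.g. `c!("34;1", "title")` yields "\x1b[34;1mtitle\x1b[0m", i.e. `title` in bold blue
+    // (SGR 34 = blue foreground, 1 = bold, 0 = reset).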
+
+    #[allow(clippy::items_after_statements)]
+    fn format_bytes(bytes: u64) -> String {
+        let bytes = Bytes::new(bytes);
+        bytes.to_string()
+    }
+
+    #[allow(clippy::items_after_statements)]
+    fn format_speed(speed: f64) -> String {
+        #[allow(clippy::cast_possible_truncation, clippy::cast_sign_loss)]
+        let bytes = Bytes::new(speed.floor() as u64);
+        format!("{bytes}/s")
+    }
+
+    let get_title = || -> String {
+        match get! {is_string, as_str, "info_dict", "ext"} {
+            "vtt" => {
+                format!(
+                    "Subtitles ({})",
+                    default_get! {as_str, "<No Subtitle Language>", "info_dict", "name"}
+                )
+            }
+            "webm" | "mp4" | "mp3" | "m4a" => {
+                default_get! { as_str, "<No title>", "info_dict", "title"}.to_owned()
+            }
+            other => panic!("The extension '{other}' is not yet implemented"),
+        }
+    };
+
+    match get! {is_string, as_str, "status"} {
+        "downloading" => {
+            let elapsed = default_get! {as_f64, 0.0f64, "elapsed"};
+            let eta = default_get! {as_f64, 0.0, "eta"};
+            let speed = default_get! {as_f64, 0.0, "speed"};
+
+            let downloaded_bytes = get! {is_u64, as_u64, "downloaded_bytes"};
+            let (total_bytes, bytes_is_estimate): (u64, &'static str) = {
+                let total_bytes = default_get!(as_u64, 0, "total_bytes");
+                if total_bytes == 0 {
+                    let maybe_estimate = default_get!(as_u64, 0, "total_bytes_estimate");
+
+                    #[allow(clippy::cast_possible_truncation, clippy::cast_sign_loss)]
+                    if maybe_estimate == 0 {
+                        // The download speed should be in bytes per second and the eta in seconds.
+                        // Multiplying them therefore yields the remaining bytes (as estimated by
+                        // `yt_dlp` from its `info.json`).
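+                        // For example, at 2 MiB/s with an eta of 30 s, roughly 60 MiB would still
+                        // be missing.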
+                        let bytes_still_needed = (speed * eta).ceil() as u64;
+
+                        (downloaded_bytes + bytes_still_needed, "~")
+                    } else {
+                        (maybe_estimate, "~")
+                    }
+                } else {
+                    (total_bytes, "")
+                }
+            };
+
+            let percent: f64 = {
+                if total_bytes == 0 {
+                    100.0
+                } else {
+                    #[allow(
+                        clippy::cast_possible_truncation,
+                        clippy::cast_sign_loss,
+                        clippy::cast_precision_loss
+                    )]
+                    {
+                        (downloaded_bytes as f64 / total_bytes as f64) * 100.0
+                    }
+                }
+            };
+
+            clear_whole_line();
+            move_to_col(1);
+
+            eprint!(
+                "{} [{}/{} at {}] -> [{} of {}{} {}] ",
+                c!("34;1", get_title()),
+                c!("33;1", MaybeDuration::from_secs_f64(elapsed)),
+                c!("33;1", MaybeDuration::from_secs_f64(eta)),
+                c!("32;1", format_speed(speed)),
+                c!("31;1", format_bytes(downloaded_bytes)),
+                c!("31;1", bytes_is_estimate),
+                c!("31;1", format_bytes(total_bytes)),
+                c!("36;1", format!("{:.02}%", percent))
+            );
+            stderr().flush()?;
+        }
+        "finished" => {
+            eprintln!("-> Finished downloading.");
+        }
+        "error" => {
+            // TODO: This should probably return an Err. But I'm not so sure where the error would
+            // bubble up to (i.e., who would catch it) <2025-01-21>
+            eprintln!("-> Error while downloading: {}", get_title());
+            process::exit(1);
+        }
+        other => unreachable!("'{other}' should not be a valid state!"),
+    }
+
+    Ok(())
+}
+
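+// Assumption: `mk_python_function!` (from the local `yt_dlp` crate) exposes `progress_hook` under
+// the name `wrapped_progress_hook` in a form that yt_dlp can call as its Python progress hook; see
+// the macro's definition for the exact generated signature.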
+mk_python_function!(progress_hook, wrapped_progress_hook);
diff --git a/crates/yt/src/main.rs b/crates/yt/src/main.rs
new file mode 100644
index 0000000..f78c23e
--- /dev/null
+++ b/crates/yt/src/main.rs
@@ -0,0 +1,307 @@
+// yt - A fully featured command line YouTube client
+//
+// Copyright (C) 2024 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+// Copyright (C) 2025 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+// SPDX-License-Identifier: GPL-3.0-or-later
+//
+// This file is part of Yt.
+//
+// You should have received a copy of the License along with this program.
+// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
+
+// `yt` is not a library. Besides, the `anyhow::Result` type is really useless if you're not going
+// to print it anyway.
+#![allow(clippy::missing_errors_doc)]
+
+use std::{env::current_exe, sync::Arc};
+
+use anyhow::{Context, Result, bail};
+use app::App;
+use bytes::Bytes;
+use cache::{invalidate, maintain};
+use clap::{CommandFactory, Parser};
+use cli::{CacheCommand, SelectCommand, SubscriptionCommand, VideosCommand};
+use config::Config;
+use log::{error, info};
+use select::cmds::handle_select_cmd;
+use storage::video_database::get::video_by_hash;
+use tokio::{
+    fs::File,
+    io::{BufReader, stdin},
+    task::JoinHandle,
+};
+
+use crate::{cli::Command, storage::subscriptions};
+
+pub mod ansi_escape_codes;
+pub mod app;
+pub mod cli;
+pub mod unreachable;
+
+pub mod cache;
+pub mod comments;
+pub mod config;
+pub mod constants;
+pub mod download;
+pub mod select;
+pub mod status;
+pub mod storage;
+pub mod subscribe;
+pub mod update;
+pub mod version;
+pub mod videos;
+pub mod watch;
+
+#[tokio::main]
+// This is _the_ main function after all. It is not really good, but it sort of works.
+#[allow(clippy::too_many_lines)]
+async fn main() -> Result<()> {
+    clap_complete::CompleteEnv::with_factory(cli::CliArgs::command).complete();
+
+    let args = cli::CliArgs::parse();
+
+    // The default verbosity is 1 (Warn)
+    let verbosity: u8 = args.verbosity + 1;
+
+    stderrlog::new()
+        .module(module_path!())
+        .modules(&["yt_dlp".to_owned(), "libmpv2".to_owned()])
+        .quiet(args.quiet)
+        .show_module_names(false)
+        .color(stderrlog::ColorChoice::Auto)
+        .verbosity(verbosity as usize)
+        .timestamp(stderrlog::Timestamp::Off)
+        .init()
+        .expect("Let's just hope that this does not panic");
+
+    info!("Using verbosity level: '{} ({})'", verbosity, {
+        match verbosity {
+            0 => "Error",
+            1 => "Warn",
+            2 => "Info",
+            3 => "Debug",
+            4.. => "Trace",
+        }
+    });
+
+    let config = Config::from_config_file(args.db_path, args.config_path, args.color)?;
+    if args.version {
+        version::show(&config).await?;
+        return Ok(());
+    }
+
+    let app = App::new(config, !args.no_migrate_db).await?;
+
+    match args.command.unwrap_or(Command::default()) {
+        Command::Download {
+            force,
+            max_cache_size,
+        } => {
+            let max_cache_size =
+                max_cache_size.unwrap_or(app.config.download.max_cache_size.as_u64());
+            info!("Max cache size: '{}'", Bytes::new(max_cache_size));
+
+            maintain(&app, false).await?;
+            if force {
+                invalidate(&app, true).await?;
+            }
+
+            download::Downloader::new()
+                .consume(Arc::new(app), max_cache_size)
+                .await?;
+        }
+        Command::Select { cmd } => {
+            let cmd = cmd.unwrap_or(SelectCommand::default());
+
+            match cmd {
+                SelectCommand::File {
+                    done,
+                    use_last_selection,
+                } => Box::pin(select::select_file(&app, done, use_last_selection)).await?,
+                SelectCommand::Split {
+                    done,
+                    sort_key,
+                    sort_mode,
+                } => Box::pin(select::select_split(&app, done, sort_key, sort_mode)).await?,
+                _ => Box::pin(handle_select_cmd(&app, cmd, None)).await?,
+            }
+        }
+        Command::Sedowa {} => {
+            Box::pin(select::select_file(&app, false, false)).await?;
+
+            let arc_app = Arc::new(app);
+            dowa(arc_app).await?;
+        }
+        Command::Dowa {} => {
+            let arc_app = Arc::new(app);
+            dowa(arc_app).await?;
+        }
+        Command::Videos { cmd } => match cmd {
+            VideosCommand::List {
+                search_query,
+                limit,
+            } => {
+                videos::query(&app, limit, search_query)
+                    .await
+                    .context("Failed to query videos")?;
+            }
+            VideosCommand::Info { hash } => {
+                let video = video_by_hash(&app, &hash.realize(&app).await?).await?;
+
+                print!(
+                    "{}",
+                    &video
+                        .to_info_display(&app)
+                        .await
+                        .context("Failed to format video")?
+                );
+            }
+        },
+        Command::Update {
+            max_backlog,
+            subscriptions,
+            grouped,
+            current_progress,
+            total_number,
+        } => {
+            let all_subs = subscriptions::get(&app).await?;
+
+            for sub in &subscriptions {
+                if !all_subs.0.contains_key(sub) {
+                    bail!(
+                        "Your specified subscription to update '{}' is not a subscription!",
+                        sub
+                    )
+                }
+            }
+
+            let max_backlog = max_backlog.unwrap_or(app.config.update.max_backlog);
+
+            if grouped {
+                const CHUNK_SIZE: usize = 50;
+
+                assert!(current_progress.is_none() && total_number.is_none());
+
+                let subs = {
+                    if subscriptions.is_empty() {
+                        all_subs.0.into_iter().map(|sub| sub.0).collect()
+                    } else {
+                        subscriptions
+                    }
+                };
+
+                let total_number = subs.len();
+                let mut current_progress = 0;
+                for chunk in subs.chunks(CHUNK_SIZE) {
+                    info!(
+                        "$ yt update {}",
+                        chunk
+                            .iter()
+                            .map(|sub_name| format!("{sub_name:#?}"))
+                            .collect::<Vec<_>>()
+                            .join(" ")
+                    );
+
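+                    // With the defaults, this re-exec roughly amounts to:
+                    //   yt update --current-progress 0 --total-number <N> <subs of this chunk>...
+                    // plus one `-v` per verbosity level that was passed to the parent process.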
+                    let status = std::process::Command::new(
+                        current_exe().context("Failed to get the current exe to re-execute")?,
+                    )
+                    .args((0..args.verbosity).map(|_| "-v"))
+                    .arg("update")
+                    .args(["--current-progress", current_progress.to_string().as_str()])
+                    .args(["--total-number", total_number.to_string().as_str()])
+                    .args(chunk)
+                    .status()?;
+
+                    if !status.success() {
+                        bail!("grouped yt update: Child process failed.");
+                    }
+
+                    current_progress += CHUNK_SIZE;
+                }
+            } else {
+                update::update(
+                    &app,
+                    max_backlog,
+                    subscriptions,
+                    total_number,
+                    current_progress,
+                )
+                .await?;
+            }
+        }
+        Command::Subscriptions { cmd } => match cmd {
+            SubscriptionCommand::Add { name, url } => {
+                subscribe::subscribe(&app, name, url)
+                    .await
+                    .context("Failed to add a subscription")?;
+            }
+            SubscriptionCommand::Remove { name } => {
+                subscribe::unsubscribe(&app, name)
+                    .await
+                    .context("Failed to remove a subscription")?;
+            }
+            SubscriptionCommand::List {} => {
+                let all_subs = subscriptions::get(&app).await?;
+
+                for (key, val) in all_subs.0 {
+                    println!("{}: '{}'", key, val.url);
+                }
+            }
+            SubscriptionCommand::Export {} => {
+                let all_subs = subscriptions::get(&app).await?;
+                for val in all_subs.0.values() {
+                    println!("{}", val.url);
+                }
+            }
+            SubscriptionCommand::Import { file, force } => {
+                if let Some(file) = file {
+                    let f = File::open(file).await?;
+
+                    subscribe::import(&app, BufReader::new(f), force).await?;
+                } else {
+                    subscribe::import(&app, BufReader::new(stdin()), force).await?;
+                }
+            }
+        },
+
+        Command::Watch {} => watch::watch(Arc::new(app)).await?,
+        Command::Playlist { watch } => watch::playlist::playlist(&app, watch).await?,
+
+        Command::Status {} => status::show(&app).await?,
+        Command::Config {} => status::config(&app)?,
+
+        Command::Database { command } => match command {
+            CacheCommand::Invalidate { hard } => invalidate(&app, hard).await?,
+            CacheCommand::Maintain { all } => maintain(&app, all).await?,
+        },
+
+        Command::Comments {} => {
+            comments::comments(&app).await?;
+        }
+        Command::Description {} => {
+            comments::description(&app).await?;
+        }
+    }
+
+    Ok(())
+}
+
+async fn dowa(arc_app: Arc<App>) -> Result<()> {
+    let max_cache_size = arc_app.config.download.max_cache_size;
+    info!("Max cache size: '{max_cache_size}'");
+
+    let arc_app_clone = Arc::clone(&arc_app);
+    let download: JoinHandle<()> = tokio::spawn(async move {
+        let result = download::Downloader::new()
+            .consume(arc_app_clone, max_cache_size.as_u64())
+            .await;
+
+        if let Err(err) = result {
+            error!("Error from downloader: {err:?}");
+        }
+    });
+
+    watch::watch(arc_app).await?;
+    download.await?;
+    Ok(())
+}
diff --git a/crates/yt/src/select/cmds/add.rs b/crates/yt/src/select/cmds/add.rs
new file mode 100644
index 0000000..2c9a323
--- /dev/null
+++ b/crates/yt/src/select/cmds/add.rs
@@ -0,0 +1,193 @@
+// yt - A fully featured command line YouTube client
+//
+// Copyright (C) 2025 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+// SPDX-License-Identifier: GPL-3.0-or-later
+//
+// This file is part of Yt.
+//
+// You should have received a copy of the License along with this program.
+// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
+
+use crate::{
+    app::App,
+    download::download_options::download_opts,
+    storage::video_database::{
+        self, extractor_hash::ExtractorHash, get::get_all_hashes, set::add_video,
+    },
+    update::video_entry_to_video,
+};
+
+use anyhow::{Context, Result, bail};
+use log::{error, warn};
+use url::Url;
+use yt_dlp::{YoutubeDL, info_json::InfoJson, json_cast, json_get};
+
+#[allow(clippy::too_many_lines)]
+pub(super) async fn add(
+    app: &App,
+    urls: Vec<Url>,
+    start: Option<usize>,
+    stop: Option<usize>,
+) -> Result<()> {
+    for url in urls {
+        async fn process_and_add(app: &App, entry: InfoJson, yt_dlp: &YoutubeDL) -> Result<()> {
+            let url = json_get!(entry, "url", as_str).parse()?;
+
+            let entry = yt_dlp
+                .extract_info(&url, false, true)
+                .with_context(|| format!("Failed to fetch entry for url: '{url}'"))?;
+
+            add_entry(app, entry).await?;
+
+            Ok(())
+        }
+
+        async fn add_entry(app: &App, entry: InfoJson) -> Result<()> {
+            // We have to re-fetch all hashes every time, because a user could try to add the same
+            // URL twice (for whatever reason).
+            let hashes = get_all_hashes(app)
+                .await
+                .context("Failed to fetch all video hashes")?;
+
+            let extractor_hash = blake3::hash(json_get!(entry, "id", as_str).as_bytes());
+            if hashes.contains(&extractor_hash) {
+                error!(
+                    "Video '{}'{} is already in the database. Skipped adding it",
+                    ExtractorHash::from_hash(extractor_hash)
+                        .into_short_hash(app)
+                        .await
+                        .with_context(|| format!(
+                            "Failed to format hash of video '{}' as short hash",
+                            entry
+                                .get("url")
+                                .map_or("<Unknown video Url>".to_owned(), ToString::to_string)
+                        ))?,
+                    entry.get("title").map_or(String::new(), |title| format!(
+                        " (\"{}\")",
+                        json_cast!(title, as_str)
+                    ))
+                );
+                return Ok(());
+            }
+
+            let video = video_entry_to_video(&entry, None)?;
+            add_video(app, video.clone()).await?;
+
+            println!("{}", &video.to_line_display(app).await?);
+
+            Ok(())
+        }
+
+        let yt_dlp = download_opts(
+            app,
+            &video_database::YtDlpOptions {
+                subtitle_langs: String::new(),
+            },
+        )?;
+
+        let entry = yt_dlp
+            .extract_info(&url, false, true)
+            .with_context(|| format!("Failed to fetch entry for url: '{url}'"))?;
+
+        match entry.get("_type").map(|val| json_cast!(val, as_str)) {
+            Some("video") => {
+                add_entry(app, entry).await?;
+                if start.is_some() || stop.is_some() {
+                    warn!(
+                        "You added `start` and/or `stop` markers for a single *video*! These will be ignored."
+                    );
+                }
+            }
+            Some("playlist") => {
+                if let Some(entries) = entry.get("entries") {
+                    let entries = json_cast!(entries, as_array);
+                    let start = start.unwrap_or(0);
+                    let stop = stop.unwrap_or(entries.len() - 1);
+
+                    let respected_entries =
+                        take_vector(entries, start, stop).with_context(|| {
+                            format!(
+                                "Failed to take entries starting at: {start} and ending with {stop}"
+                            )
+                        })?;
+
+                    if respected_entries.is_empty() {
+                        warn!("No entries found, after applying your start/stop limits.");
+                    } else {
+                        // Pre-warm the cache
+                        process_and_add(
+                            app,
+                            json_cast!(respected_entries[0], as_object).to_owned(),
+                            &yt_dlp,
+                        )
+                        .await?;
+                        let respected_entries = &respected_entries[1..];
+
+                        let futures: Vec<_> = respected_entries
+                            .iter()
+                            .map(|entry| {
+                                process_and_add(
+                                    app,
+                                    json_cast!(entry, as_object).to_owned(),
+                                    &yt_dlp,
+                                )
+                            })
+                            .collect();
+
+                        for fut in futures {
+                            fut.await?;
+                        }
+                    }
+                } else {
+                    bail!("Your playlist does not seem to have any entries!")
+                }
+            }
+            other => bail!(
+                "Your URL should point to a video or a playlist, but points to a '{:#?}'",
+                other
+            ),
+        }
+    }
+
+    Ok(())
+}
+
+fn take_vector<T>(vector: &[T], start: usize, stop: usize) -> Result<&[T]> {
+    let length = vector.len();
+
+    if stop >= length {
+        bail!(
+            "Your stop marker ({stop}) exceeds the possible entries ({length})! Remember that it is zero indexed."
+        );
+    }
+
+    Ok(&vector[start..=stop])
+}
+
+#[cfg(test)]
+mod test {
+    use crate::select::cmds::add::take_vector;
+
+    #[test]
+    fn test_vector_take() {
+        let vec = vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10];
+
+        let new_vec = take_vector(&vec, 2, 8).unwrap();
+
+        assert_eq!(new_vec, vec![2, 3, 4, 5, 6, 7, 8]);
+    }
+
+    #[test]
+    fn test_vector_take_overflow() {
+        let vec = vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10];
+
+        assert!(take_vector(&vec, 0, 12).is_err());
+    }
+
+    #[test]
+    fn test_vector_take_equal() {
+        let vec = vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10];
+
+        assert!(take_vector(&vec, 0, 11).is_err());
+    }
+}
diff --git a/crates/yt/src/select/cmds/mod.rs b/crates/yt/src/select/cmds/mod.rs
new file mode 100644
index 0000000..9da795a
--- /dev/null
+++ b/crates/yt/src/select/cmds/mod.rs
@@ -0,0 +1,113 @@
+// yt - A fully featured command line YouTube client
+//
+// Copyright (C) 2024 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+// Copyright (C) 2025 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+// SPDX-License-Identifier: GPL-3.0-or-later
+//
+// This file is part of Yt.
+//
+// You should have received a copy of the License along with this program.
+// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
+
+use crate::{
+    app::App,
+    cli::{SelectCommand, SharedSelectionCommandArgs},
+    storage::video_database::{
+        Priority, VideoOptions, VideoStatus,
+        get::video_by_hash,
+        set::{set_video_options, video_status},
+    },
+};
+
+use anyhow::{Context, Result, bail};
+
+mod add;
+
+pub async fn handle_select_cmd(
+    app: &App,
+    cmd: SelectCommand,
+    line_number: Option<i64>,
+) -> Result<()> {
+    match cmd {
+        SelectCommand::Pick { shared } => {
+            handle_status_change(app, shared, line_number, VideoStatus::Pick).await?;
+        }
+        SelectCommand::Drop { shared } => {
+            handle_status_change(app, shared, line_number, VideoStatus::Drop).await?;
+        }
+        SelectCommand::Watched { shared } => {
+            handle_status_change(app, shared, line_number, VideoStatus::Watched).await?;
+        }
+        SelectCommand::Add { urls, start, stop } => {
+            Box::pin(add::add(app, urls, start, stop)).await?;
+        }
+        SelectCommand::Watch { shared } => {
+            let hash = shared.hash.clone().realize(app).await?;
+
+            let video = video_by_hash(app, &hash).await?;
+
+            if let VideoStatus::Cached {
+                cache_path,
+                is_focused,
+            } = video.status
+            {
+                handle_status_change(
+                    app,
+                    shared,
+                    line_number,
+                    VideoStatus::Cached {
+                        cache_path,
+                        is_focused,
+                    },
+                )
+                .await?;
+            } else {
+                handle_status_change(app, shared, line_number, VideoStatus::Watch).await?;
+            }
+        }
+
+        SelectCommand::Url { shared } => {
+            let Some(url) = shared.url else {
+                bail!("You need to provide a url to `select url ..`")
+            };
+
+            let mut firefox = std::process::Command::new("firefox");
+            firefox.args(["-P", "timesinks.youtube"]);
+            firefox.arg(url.as_str());
+            let _handle = firefox.spawn().context("Failed to run firefox")?;
+        }
+        SelectCommand::File { .. } | SelectCommand::Split { .. } => {
+            unreachable!("This should have been filtered out")
+        }
+    }
+    Ok(())
+}
+
+async fn handle_status_change(
+    app: &App,
+    shared: SharedSelectionCommandArgs,
+    line_number: Option<i64>,
+    new_status: VideoStatus,
+) -> Result<()> {
+    let hash = shared.hash.realize(app).await?;
+    let video_options = VideoOptions::new(
+        shared
+            .subtitle_langs
+            .unwrap_or(app.config.select.subtitle_langs.clone()),
+        shared.speed.unwrap_or(app.config.select.playback_speed),
+    );
+    let priority = compute_priority(line_number, shared.priority);
+
+    video_status(app, &hash, new_status, priority).await?;
+    set_video_options(app, &hash, &video_options).await?;
+
+    Ok(())
+}
+
+fn compute_priority(line_number: Option<i64>, priority: Option<i64>) -> Option<Priority> {
+    if let Some(pri) = priority {
+        Some(Priority::from(pri))
+    } else {
+        line_number.map(Priority::from)
+    }
+}
diff --git a/crates/yt/src/select/mod.rs b/crates/yt/src/select/mod.rs
new file mode 100644
index 0000000..2478b76
--- /dev/null
+++ b/crates/yt/src/select/mod.rs
@@ -0,0 +1,321 @@
+// yt - A fully featured command line YouTube client
+//
+// Copyright (C) 2024 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+// Copyright (C) 2025 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+// SPDX-License-Identifier: GPL-3.0-or-later
+//
+// This file is part of Yt.
+//
+// You should have received a copy of the License along with this program.
+// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
+
+use std::{
+    collections::HashMap,
+    env::{self},
+    fs::{self, File, OpenOptions},
+    io::{BufRead, BufReader, BufWriter, Read, Seek, Write},
+    iter,
+    path::Path,
+    string::String,
+};
+
+use crate::{
+    app::App,
+    cli::{CliArgs, SelectSplitSortKey, SelectSplitSortMode},
+    constants::HELP_STR,
+    storage::video_database::{Video, VideoStatusMarker, get},
+    unreachable::Unreachable,
+};
+
+use anyhow::{Context, Result, bail};
+use clap::Parser;
+use cmds::handle_select_cmd;
+use futures::{TryStreamExt, stream::FuturesOrdered};
+use log::info;
+use selection_file::process_line;
+use tempfile::Builder;
+use tokio::process::Command;
+
+pub mod cmds;
+pub mod selection_file;
+
+pub async fn select_split(
+    app: &App,
+    done: bool,
+    sort_key: SelectSplitSortKey,
+    sort_mode: SelectSplitSortMode,
+) -> Result<()> {
+    let temp_dir = Builder::new()
+        .prefix("yt_video_select-")
+        .rand_bytes(6)
+        .tempdir()
+        .context("Failed to get tempdir")?;
+
+    let matching_videos = get_videos(app, done).await?;
+
+    let mut no_author = vec![];
+    let mut author_map = HashMap::new();
+    for video in matching_videos {
+        if let Some(sub) = &video.parent_subscription_name {
+            if author_map.contains_key(sub) {
+                let vec: &mut Vec<_> = author_map
+                    .get_mut(sub)
+                    .unreachable("This key is set, we checked in the if above");
+
+                vec.push(video);
+            } else {
+                author_map.insert(sub.to_owned(), vec![video]);
+            }
+        } else {
+            no_author.push(video);
+        }
+    }
+
+    let author_map = {
+        let mut temp_vec: Vec<_> = author_map.into_iter().collect();
+
+        match sort_key {
+            SelectSplitSortKey::Publisher => {
+                // PERFORMANCE: The clone here should not be needed. <2025-06-15>
+                temp_vec.sort_by_key(|(name, _): &(String, Vec<Video>)| name.to_owned());
+            }
+            SelectSplitSortKey::Videos => {
+                temp_vec.sort_by_key(|(_, videos): &(String, Vec<Video>)| videos.len());
+            }
+        }
+
+        match sort_mode {
+            SelectSplitSortMode::Asc => {
+                // Std's default mode is ascending.
+            }
+            SelectSplitSortMode::Desc => {
+                temp_vec.reverse();
+            }
+        }
+
+        temp_vec
+    };
+
+    for (index, (name, videos)) in author_map
+        .into_iter()
+        .chain(iter::once((
+            "<No parent subscription>".to_owned(),
+            no_author,
+        )))
+        .enumerate()
+    {
+        let mut file_path = temp_dir.path().join(format!("{index:02}_{name}"));
+        file_path.set_extension("yts");
+
+        let tmp_file = File::create(&file_path)
+            .with_context(|| format!("Falied to create file at: {}", file_path.display()))?;
+
+        write_videos_to_file(app, &tmp_file, &videos)
+            .await
+            .with_context(|| format!("Falied to populate file at: {}", file_path.display()))?;
+    }
+
+    open_editor_at(temp_dir.path()).await?;
+
+    let mut paths = vec![];
+    for maybe_entry in temp_dir
+        .path()
+        .read_dir()
+        .context("Failed to open temp dir for reading")?
+    {
+        let entry = maybe_entry.context("Failed to read entry in temp dir")?;
+
+        if !entry.file_type()?.is_file() {
+            bail!("Found non-file entry: {}", entry.path().display());
+        }
+
+        paths.push(entry.path());
+    }
+
+    paths.sort();
+
+    let mut persistent_file = OpenOptions::new()
+        .read(true)
+        .write(true)
+        .truncate(true)
+        .open(&app.config.paths.last_selection_path)
+        .context("Failed to open persistent selection file")?;
+
+    for path in paths {
+        let mut read_file = File::open(path)?;
+
+        let mut buffer = vec![];
+        read_file.read_to_end(&mut buffer)?;
+        persistent_file.write_all(&buffer)?;
+    }
+
+    persistent_file.rewind()?;
+
+    let processed = process_file(app, &persistent_file).await?;
+
+    info!("Processed {processed} records.");
+    temp_dir.close().context("Failed to close the temp dir")?;
+    Ok(())
+}
+
+pub async fn select_file(app: &App, done: bool, use_last_selection: bool) -> Result<()> {
+    let temp_file = Builder::new()
+        .prefix("yt_video_select-")
+        .suffix(".yts")
+        .rand_bytes(6)
+        .tempfile()
+        .context("Failed to get tempfile")?;
+
+    if use_last_selection {
+        fs::copy(&app.config.paths.last_selection_path, &temp_file)?;
+    } else {
+        let matching_videos = get_videos(app, done).await?;
+
+        write_videos_to_file(app, temp_file.as_file(), &matching_videos).await?;
+    }
+
+    open_editor_at(temp_file.path()).await?;
+
+    let read_file = temp_file.reopen()?;
+    fs::copy(temp_file.path(), &app.config.paths.last_selection_path)
+        .context("Failed to persist selection file")?;
+
+    let processed = process_file(app, &read_file).await?;
+    info!("Processed {processed} records.");
+
+    Ok(())
+}
+
+async fn get_videos(app: &App, include_done: bool) -> Result<Vec<Video>> {
+    if include_done {
+        get::videos(app, VideoStatusMarker::ALL).await
+    } else {
+        get::videos(
+            app,
+            &[
+                VideoStatusMarker::Pick,
+                //
+                VideoStatusMarker::Watch,
+                VideoStatusMarker::Cached,
+            ],
+        )
+        .await
+    }
+}
+
+async fn write_videos_to_file(app: &App, file: &File, videos: &[Video]) -> Result<()> {
+    // Warm up the cache for the display rendering of the videos.
+    // Otherwise the futures would all try to warm it up at the same time.
+    if let Some(vid) = videos.first() {
+        drop(vid.to_line_display(app).await?);
+    }
+
+    let mut edit_file = BufWriter::new(file);
+
+    videos
+        .iter()
+        .map(|vid| vid.to_select_file_display(app))
+        .collect::<FuturesOrdered<_>>()
+        .try_collect::<Vec<String>>()
+        .await?
+        .into_iter()
+        .try_for_each(|line| -> Result<()> {
+            edit_file
+                .write_all(line.as_bytes())
+                .context("Failed to write to `edit_file`")?;
+
+            Ok(())
+        })?;
+
+    edit_file.write_all(HELP_STR.as_bytes())?;
+    edit_file.flush().context("Failed to flush edit file")?;
+
+    Ok(())
+}
+
+async fn process_file(app: &App, file: &File) -> Result<i64> {
+    let reader = BufReader::new(file);
+
+    let mut line_number = 0;
+
+    for line in reader.lines() {
+        let line = line.context("Failed to read a line")?;
+
+        if let Some(line) = process_line(&line)? {
+            line_number -= 1;
+
+            // debug!(
+            //     "Parsed command: `{}`",
+            //     line.iter()
+            //         .map(|val| format!("\"{}\"", val))
+            //         .collect::<Vec<String>>()
+            //         .join(" ")
+            // );
+
+            let arg_line = ["yt", "select"]
+                .into_iter()
+                .chain(line.iter().map(String::as_str));
+
+            let args = CliArgs::parse_from(arg_line);
+
+            let crate::cli::Command::Select { cmd } = args
+                .command
+                .unreachable("This will be some, as we constructed it above.")
+            else {
+                unreachable!("This is checked in the `filter_line` function")
+            };
+
+            Box::pin(handle_select_cmd(
+                app,
+                cmd.unreachable(
+                    "This value should always be some \
+                    here, as it would otherwise have thrown an error above.",
+                ),
+                Some(line_number),
+            ))
+            .await?;
+        }
+    }
+
+    Ok(-line_number)
+}
+
+async fn open_editor_at(path: &Path) -> Result<()> {
+    let editor = env::var("EDITOR").unwrap_or("nvim".to_owned());
+
+    let mut nvim = Command::new(&editor);
+    nvim.arg(path);
+    let status = nvim
+        .status()
+        .await
+        .with_context(|| format!("Falied to run editor: {editor}"))?;
+
+    if status.success() {
+        Ok(())
+    } else {
+        bail!("Editor ({editor}) exited with error status: {}", status)
+    }
+}
+
+// // FIXME: There should be no reason why we need to re-run yt, just to get the help string. But I've
+// // yet to find a way to do it without the extra exec <2024-08-20>
+// async fn get_help() -> Result<String> {
+//     let binary_name = current_exe()?;
+//     let cmd = Command::new(binary_name)
+//         .args(&["select", "--help"])
+//         .output()
+//         .await?;
+//
+//     assert_eq!(cmd.status.code(), Some(0));
+//
+//     let output = String::from_utf8(cmd.stdout).expect("Our help output was not utf8?");
+//
+//     let out = output
+//         .lines()
+//         .map(|line| format!("# {}\n", line))
+//         .collect::<String>();
+//
+//     debug!("Returning help: '{}'", &out);
+//
+//     Ok(out)
+// }
diff --git a/crates/yt/src/select/selection_file/duration.rs b/crates/yt/src/select/selection_file/duration.rs
new file mode 100644
index 0000000..668a0b8
--- /dev/null
+++ b/crates/yt/src/select/selection_file/duration.rs
@@ -0,0 +1,246 @@
+// yt - A fully featured command line YouTube client
+//
+// Copyright (C) 2024 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+// Copyright (C) 2025 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+// SPDX-License-Identifier: GPL-3.0-or-later
+//
+// This file is part of Yt.
+//
+// You should have received a copy of the License along with this program.
+// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
+
+use std::str::FromStr;
+use std::time::Duration;
+
+use anyhow::{Result, bail};
+
+const SECOND: u64 = 1;
+const MINUTE: u64 = 60 * SECOND;
+const HOUR: u64 = 60 * MINUTE;
+const DAY: u64 = 24 * HOUR;
+
+#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
+pub struct MaybeDuration {
+    time: Option<Duration>,
+}
+
+impl MaybeDuration {
+    #[must_use]
+    pub fn from_std(d: Duration) -> Self {
+        Self { time: Some(d) }
+    }
+
+    #[must_use]
+    pub fn from_secs_f64(d: f64) -> Self {
+        Self {
+            time: Some(Duration::from_secs_f64(d)),
+        }
+    }
+    #[must_use]
+    pub fn from_maybe_secs_f64(d: Option<f64>) -> Self {
+        Self {
+            time: d.map(Duration::from_secs_f64),
+        }
+    }
+    #[must_use]
+    pub fn from_secs(d: u64) -> Self {
+        Self {
+            time: Some(Duration::from_secs(d)),
+        }
+    }
+
+    #[must_use]
+    pub fn zero() -> Self {
+        Self {
+            time: Some(Duration::default()),
+        }
+    }
+
+    /// Try to return the current duration encoded as seconds.
+    #[must_use]
+    pub fn as_secs(&self) -> Option<u64> {
+        self.time.map(|v| v.as_secs())
+    }
+
+    /// Try to return the current duration encoded as seconds and nanoseconds.
+    #[must_use]
+    pub fn as_secs_f64(&self) -> Option<f64> {
+        self.time.map(|v| v.as_secs_f64())
+    }
+}
+
+impl FromStr for MaybeDuration {
+    type Err = anyhow::Error;
+
+    fn from_str(s: &str) -> Result<Self, Self::Err> {
+        #[derive(Debug, Clone, Copy)]
+        enum Token {
+            Number(u64),
+            UnitConstant((char, u64)),
+        }
+
+        struct Tokenizer<'a> {
+            input: &'a str,
+        }
+
+        impl Tokenizer<'_> {
+            fn next(&mut self) -> Result<Option<Token>> {
+                loop {
+                    if let Some(next) = self.peek() {
+                        match next {
+                            '0'..='9' => {
+                                let mut number = self.expect_num();
+                                while matches!(self.peek(), Some('0'..='9')) {
+                                    number *= 10;
+                                    number += self.expect_num();
+                                }
+                                break Ok(Some(Token::Number(number)));
+                            }
+                            's' => {
+                                self.chomp();
+                                break Ok(Some(Token::UnitConstant(('s', SECOND))));
+                            }
+                            'm' => {
+                                self.chomp();
+                                break Ok(Some(Token::UnitConstant(('m', MINUTE))));
+                            }
+                            'h' => {
+                                self.chomp();
+                                break Ok(Some(Token::UnitConstant(('h', HOUR))));
+                            }
+                            'd' => {
+                                self.chomp();
+                                break Ok(Some(Token::UnitConstant(('d', DAY))));
+                            }
+                            ' ' => {
+                                // Simply ignore white space
+                                self.chomp();
+                            }
+                            other => bail!("Unknown unit: {other:#?}"),
+                        }
+                    } else {
+                        break Ok(None);
+                    }
+                }
+            }
+
+            fn chomp(&mut self) {
+                self.input = &self.input[1..];
+            }
+
+            fn peek(&self) -> Option<char> {
+                self.input.chars().next()
+            }
+
+            fn expect_num(&mut self) -> u64 {
+                let next = self.peek().expect("Should be some at this point");
+                self.chomp();
+                assert!(next.is_ascii_digit());
+                (next as u64) - ('0' as u64)
+            }
+        }
+
+        if s == "[No duration]" {
+            return Ok(Self { time: None });
+        }
+
+        let mut tokenizer = Tokenizer { input: s };
+
+        let mut value = 0;
+        let mut current_val = None;
+        while let Some(token) = tokenizer.next()? {
+            match token {
+                Token::Number(number) => {
+                    if let Some(current_val) = current_val {
+                        bail!("Failed to find unit for number: {current_val}");
+                    }
+
+                    {
+                        current_val = Some(number);
+                    }
+                }
+                Token::UnitConstant((name, unit)) => {
+                    if let Some(cval) = current_val {
+                        value += cval * unit;
+                        current_val = None;
+                    } else {
+                        bail!("Found unit without number: {name:#?}");
+                    }
+                }
+            }
+        }
+
+        if let Some(current_val) = current_val {
+            bail!("Duration endet without unit, number was: {current_val}");
+        }
+
+        Ok(Self {
+            time: Some(Duration::from_secs(value)),
+        })
+    }
+}
+
+impl std::fmt::Display for MaybeDuration {
+    fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> Result<(), std::fmt::Error> {
+        if let Some(self_seconds) = self.as_secs() {
+            let base_day = self_seconds - (self_seconds % DAY);
+            let base_hour = (self_seconds % DAY) - ((self_seconds % DAY) % HOUR);
+            let base_min = (self_seconds % HOUR) - (((self_seconds % DAY) % HOUR) % MINUTE);
+            let base_sec = ((self_seconds % DAY) % HOUR) % MINUTE;
+
+            let d = base_day / DAY;
+            let h = base_hour / HOUR;
+            let m = base_min / MINUTE;
+            let s = base_sec / SECOND;
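+            // Example: 93_784 s yields d = 1, h = 2, m = 3, s = 4 and is rendered as "1d 2h 3m".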
+
+            if d > 0 {
+                write!(fmt, "{d}d {h}h {m}m")
+            } else if h > 0 {
+                write!(fmt, "{h}h {m}m")
+            } else {
+                write!(fmt, "{m}m {s}s")
+            }
+        } else {
+            write!(fmt, "[No duration]")
+        }
+    }
+}
+#[cfg(test)]
+mod test {
+    use std::str::FromStr;
+
+    use crate::select::selection_file::duration::{DAY, HOUR, MINUTE};
+
+    use super::MaybeDuration;
+
+    fn mk_roundtrip(input: MaybeDuration, expected: &str) {
+        let output = MaybeDuration::from_str(expected).unwrap();
+
+        assert_eq!(input.to_string(), output.to_string());
+        assert_eq!(input.to_string(), expected);
+        assert_eq!(
+            MaybeDuration::from_str(input.to_string().as_str()).unwrap(),
+            output
+        );
+    }
+
+    #[test]
+    fn test_roundtrip_duration_1h() {
+        mk_roundtrip(MaybeDuration::from_secs(HOUR), "1h 0m");
+    }
+    #[test]
+    fn test_roundtrip_duration_30min() {
+        mk_roundtrip(MaybeDuration::from_secs(MINUTE * 30), "30m 0s");
+    }
+    #[test]
+    fn test_roundtrip_duration_1d() {
+        mk_roundtrip(
+            MaybeDuration::from_secs(DAY + MINUTE * 30 + HOUR * 2),
+            "1d 2h 30m",
+        );
+    }
+    #[test]
+    fn test_roundtrip_duration_none() {
+        mk_roundtrip(MaybeDuration::from_maybe_secs_f64(None), "[No duration]");
+    }
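+    // An extra sketch (not part of the original test set): compound durations with spaces are
+    // summed up unit by unit.
+    #[test]
+    fn test_parse_compound_duration() {
+        let parsed = MaybeDuration::from_str("1d 2h 30m").unwrap();
+        assert_eq!(parsed.as_secs(), Some(DAY + 2 * HOUR + 30 * MINUTE));
+    }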
+}
diff --git a/crates/yt/src/select/selection_file/help.str b/crates/yt/src/select/selection_file/help.str
new file mode 100644
index 0000000..e3cc347
--- /dev/null
+++ b/crates/yt/src/select/selection_file/help.str
@@ -0,0 +1,12 @@
+# Commands:
+#   w,  watch    [-p,-s,-l]   Mark the video given by the hash to be watched
+#   wd, watched  [-p,-s,-l]   Mark the video given by the hash as already watched
+#   d,  drop     [-p,-s,-l]   Mark the video given by the hash to be dropped
+#   u,  url      [-p,-s,-l]   Open the video URL in Firefox's `timesinks.youtube` profile
+#   p,  pick     [-p,-s,-l]   Reset the videos status to 'Pick'
+#   a,  add      URL          Add a video, defined by the URL
+#
+# See `yt select <cmd_name> --help` for more help.
+#
+# These lines can be re-ordered; they are executed from top to bottom.
+# vim: filetype=yts conceallevel=2 concealcursor=nc colorcolumn= nowrap
diff --git a/crates/yt/src/select/selection_file/help.str.license b/crates/yt/src/select/selection_file/help.str.license
new file mode 100644
index 0000000..a0e196c
--- /dev/null
+++ b/crates/yt/src/select/selection_file/help.str.license
@@ -0,0 +1,10 @@
+yt - A fully featured command line YouTube client
+
+Copyright (C) 2024 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+Copyright (C) 2025 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+SPDX-License-Identifier: GPL-3.0-or-later
+
+This file is part of Yt.
+
+You should have received a copy of the License along with this program.
+If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
diff --git a/crates/yt/src/select/selection_file/mod.rs b/crates/yt/src/select/selection_file/mod.rs
new file mode 100644
index 0000000..f5e0531
--- /dev/null
+++ b/crates/yt/src/select/selection_file/mod.rs
@@ -0,0 +1,42 @@
+// yt - A fully featured command line YouTube client
+//
+// Copyright (C) 2024 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+// Copyright (C) 2025 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+// SPDX-License-Identifier: GPL-3.0-or-later
+//
+// This file is part of Yt.
+//
+// You should have received a copy of the License along with this program.
+// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
+
+//! The data structures needed to express the file that the user edits
+
+use anyhow::{Result, bail};
+use shlex::Shlex;
+
+pub mod duration;
+
+/// # Panics
+/// If internal assertions fail.
+pub fn process_line(line: &str) -> Result<Option<Vec<String>>> {
+    // Filter out comments and empty lines
+    if line.starts_with('#') || line.trim().is_empty() {
+        Ok(None)
+    } else {
+        let split: Vec<_> = {
+            let mut shl = Shlex::new(line);
+            let res = shl.by_ref().collect();
+
+            if shl.had_error {
+                bail!("Failed to parse line '{line}'")
+            }
+
+            assert_eq!(shl.line_no, 1, "A unexpected newline appeared");
+            res
+        };
+
+        assert!(!split.is_empty());
+
+        Ok(Some(split))
+    }
+}
diff --git a/crates/yt/src/status/mod.rs b/crates/yt/src/status/mod.rs
new file mode 100644
index 0000000..6883802
--- /dev/null
+++ b/crates/yt/src/status/mod.rs
@@ -0,0 +1,130 @@
+// yt - A fully featured command line YouTube client
+//
+// Copyright (C) 2024 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+// Copyright (C) 2025 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+// SPDX-License-Identifier: GPL-3.0-or-later
+//
+// This file is part of Yt.
+//
+// You should have received a copy of the License along with this program.
+// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
+
+use std::time::Duration;
+
+use crate::{
+    app::App,
+    download::Downloader,
+    select::selection_file::duration::MaybeDuration,
+    storage::{
+        subscriptions,
+        video_database::{VideoStatusMarker, get},
+    },
+};
+
+use anyhow::{Context, Result};
+use bytes::Bytes;
+
+macro_rules! get {
+    ($videos:expr, $status:ident) => {
+        $videos
+            .iter()
+            .filter(|vid| vid.status.as_marker() == VideoStatusMarker::$status)
+            .count()
+    };
+
+    (@collect $videos:expr, $status:ident) => {
+        $videos
+            .iter()
+            .filter(|vid| vid.status.as_marker() == VideoStatusMarker::$status)
+            .collect()
+    };
+}
+
+pub async fn show(app: &App) -> Result<()> {
+    let all_videos = get::videos(app, VideoStatusMarker::ALL).await?;
+
+    // lengths
+    let picked_videos_len = get!(all_videos, Pick);
+
+    let watch_videos_len = get!(all_videos, Watch);
+    let cached_videos_len = get!(all_videos, Cached);
+    let watched_videos_len = get!(all_videos, Watched);
+    let watched_videos: Vec<_> = get!(@collect all_videos, Watched);
+
+    let drop_videos_len = get!(all_videos, Drop);
+    let dropped_videos_len = get!(all_videos, Dropped);
+
+    let subscriptions = subscriptions::get(app).await?;
+    let subscriptions_len = subscriptions.0.len();
+
+    let watchtime_status = {
+        let total_watch_time_raw = watched_videos
+            .iter()
+            .fold(Duration::default(), |acc, vid| acc + vid.watch_progress);
+
+        // Most videos are watched at a playback speed `s` (as set in the config file).
+        // With `y` being the raw (video-time) watch progress and `x` the wall-clock time:
+        //      y = x * s  =>  x = y / s
+        let total_watch_time = Duration::from_secs_f64(
+            (total_watch_time_raw.as_secs_f64()) / app.config.select.playback_speed,
+        );
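+        // E.g. 60 minutes of raw watch progress at 1.5x playback speed corresponds to
+        // 40 minutes of wall-clock time.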
+
+        let speed = app.config.select.playback_speed;
+
+        // Do not print the adjusted time if the user has kept the playback speed at 1.
+        #[allow(clippy::float_cmp)]
+        if speed == 1.0 {
+            format!(
+                "Total Watchtime: {}\n",
+                MaybeDuration::from_std(total_watch_time_raw)
+            )
+        } else {
+            format!(
+                "Total Watchtime: {} (at {speed} speed: {})\n",
+                MaybeDuration::from_std(total_watch_time_raw),
+                MaybeDuration::from_std(total_watch_time),
+            )
+        }
+    };
+
+    let watch_rate: f64 = {
+        fn to_f64(input: usize) -> f64 {
+            f64::from(u32::try_from(input).expect("This should never exceed u32::MAX"))
+        }
+
+        let count =
+            to_f64(watched_videos_len) / (to_f64(drop_videos_len) + to_f64(dropped_videos_len));
+        count * 100.0
+    };
+
+    let cache_usage_raw = Downloader::get_current_cache_allocation(app)
+        .await
+        .context("Failed to get current cache allocation")?;
+    let cache_usage: Bytes = cache_usage_raw;
+    println!(
+        "\
+Picked   Videos: {picked_videos_len}
+
+Watch    Videos: {watch_videos_len}
+Cached   Videos: {cached_videos_len}
+Watched  Videos: {watched_videos_len} (watch rate: {watch_rate:.2} %)
+
+Drop     Videos: {drop_videos_len}
+Dropped  Videos: {dropped_videos_len}
+
+{watchtime_status}
+
+  Subscriptions: {subscriptions_len}
+    Cache usage: {cache_usage}"
+    );
+
+    Ok(())
+}
+
+pub fn config(app: &App) -> Result<()> {
+    let config_str = toml::to_string(&app.config)?;
+
+    print!("{config_str}");
+
+    Ok(())
+}
diff --git a/crates/yt/src/storage/migrate/mod.rs b/crates/yt/src/storage/migrate/mod.rs
new file mode 100644
index 0000000..953d079
--- /dev/null
+++ b/crates/yt/src/storage/migrate/mod.rs
@@ -0,0 +1,279 @@
+// yt - A fully featured command line YouTube client
+//
+// Copyright (C) 2025 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+// SPDX-License-Identifier: GPL-3.0-or-later
+//
+// This file is part of Yt.
+//
+// You should have received a copy of the License along with this program.
+// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
+
+use std::{
+    fmt::Display,
+    future::Future,
+    time::{SystemTime, UNIX_EPOCH},
+};
+
+use anyhow::{Context, Result, bail};
+use chrono::TimeDelta;
+use log::{debug, info};
+use sqlx::{Sqlite, SqlitePool, Transaction, query};
+
+use crate::app::App;
+
+macro_rules! make_upgrade {
+    ($app:expr, $old_version:expr, $new_version:expr, $sql_name:expr) => {
+        add_error_context(
+            async {
+                let mut tx = $app
+                    .database
+                    .begin()
+                    .await
+                    .context("Failed to start the update transaction")?;
+                debug!("Migrating: {} -> {}", $old_version, $new_version);
+
+                sqlx::raw_sql(include_str!($sql_name))
+                    .execute(&mut *tx)
+                    .await
+                    .context("Failed to run the update sql script")?;
+
+                set_db_version(
+                    &mut tx,
+                    if $old_version == Self::Empty {
+                        // There is no previous version we would need to remove
+                        None
+                    } else {
+                        Some($old_version)
+                    },
+                    $new_version,
+                )
+                .await
+                .with_context(|| format!("Failed to set the new version ({})", $new_version))?;
+
+                tx.commit()
+                    .await
+                    .context("Failed to commit the update transaction")?;
+
+                // NOTE: This is needed so that SQLite "sees" our changes to the table
+                // without having to reconnect. <2025-02-18>
+                query!("VACUUM")
+                    .execute(&$app.database)
+                    .await
+                    .context("Failed to vacuum database")?;
+
+                Ok(())
+            },
+            $new_version,
+        )
+        .await?;
+
+        Box::pin($new_version.update($app)).await.context(concat!(
+            "While updating to version: ",
+            stringify!($new_version)
+        ))
+    };
+}
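+// Usage sketch (mirroring `DbVersion::update` below):
+//   make_upgrade! {app, Self::Zero, Self::One, "./sql/1_Zero_to_One.sql"}
+// runs the SQL script inside one transaction, records the version bump in the `version` table and
+// then recursively continues the migration from the new version.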
+
+#[derive(Debug, Clone, Copy, PartialEq, PartialOrd)]
+pub enum DbVersion {
+    /// The database is not yet initialized.
+    Empty,
+
+    /// The first database version.
+    /// Introduced: 2025-02-16.
+    Zero,
+
+    /// Introduced: 2025-02-17.
+    One,
+
+    /// Introduced: 2025-02-18.
+    Two,
+
+    /// Introduced: 2025-03-21.
+    Three,
+}
+const CURRENT_VERSION: DbVersion = DbVersion::Three;
+
+async fn add_error_context(
+    function: impl Future<Output = Result<()>>,
+    level: DbVersion,
+) -> Result<()> {
+    function
+        .await
+        .with_context(|| format!("Failed to migrate database to version: {level}"))
+}
+
+async fn set_db_version(
+    tx: &mut Transaction<'_, Sqlite>,
+    old_version: Option<DbVersion>,
+    new_version: DbVersion,
+) -> Result<()> {
+    let valid_from = get_current_date();
+
+    if let Some(old_version) = old_version {
+        let valid_to = valid_from + 1;
+        let old_version = old_version.as_sql_integer();
+
+        query!(
+            "UPDATE version SET valid_to = ? WHERE namespace = 'yt' AND number = ?;",
+            valid_to,
+            old_version
+        )
+        .execute(&mut *(*tx))
+        .await?;
+    }
+
+    let version = new_version.as_sql_integer();
+
+    query!(
+        "INSERT INTO version (namespace, number, valid_from, valid_to) VALUES ('yt', ?, ?, NULL);",
+        version,
+        valid_from
+    )
+    .execute(&mut *(*tx))
+    .await?;
+
+    Ok(())
+}
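+// Illustration of the bookkeeping above (timestamps are made up): migrating from Zero to One at
+// time `t` turns the current row ('yt', 0, t0, NULL) into ('yt', 0, t0, t + 1) and inserts
+// ('yt', 1, t, NULL) as the new current version.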
+
+impl DbVersion {
+    fn as_sql_integer(self) -> i32 {
+        match self {
+            DbVersion::Zero => 0,
+            DbVersion::One => 1,
+            DbVersion::Two => 2,
+            DbVersion::Three => 3,
+
+            DbVersion::Empty => unreachable!("A empty version does not have an associated integer"),
+        }
+    }
+
+    fn from_db(number: i64, namespace: &str) -> Result<Self> {
+        match (number, namespace) {
+            (0, "yt") => Ok(DbVersion::Zero),
+            (1, "yt") => Ok(DbVersion::One),
+            (2, "yt") => Ok(DbVersion::Two),
+            (3, "yt") => Ok(DbVersion::Three),
+
+            (0, other) => bail!("Db version is Zero, but got unknown namespace: '{other}'"),
+            (1, other) => bail!("Db version is One, but got unknown namespace: '{other}'"),
+            (2, other) => bail!("Db version is Two, but got unknown namespace: '{other}'"),
+            (3, other) => bail!("Db version is Three, but got unknown namespace: '{other}'"),
+
+            (other, "yt") => bail!("Got unkown version for 'yt' namespace: {other}"),
+            (num, nasp) => bail!("Got unkown version number ({num}) and namespace ('{nasp}')"),
+        }
+    }
+
+    /// Try to update the database from version [`self`] to the [`CURRENT_VERSION`].
+    ///
+    /// Each update is atomic, so if this function fails you are still guaranteed to have a
+    /// database at the version reported by [`get_version`].
+    #[allow(clippy::too_many_lines)]
+    async fn update(self, app: &App) -> Result<()> {
+        match self {
+            Self::Empty => {
+                make_upgrade! {app, Self::Empty, Self::Zero, "./sql/0_Empty_to_Zero.sql"}
+            }
+
+            Self::Zero => {
+                make_upgrade! {app, Self::Zero, Self::One, "./sql/1_Zero_to_One.sql"}
+            }
+
+            Self::One => {
+                make_upgrade! {app, Self::One, Self::Two, "./sql/2_One_to_Two.sql"}
+            }
+
+            Self::Two => {
+                make_upgrade! {app, Self::Two, Self::Three, "./sql/3_Two_to_Three.sql"}
+            }
+
+            // This is the current_version
+            Self::Three => {
+                assert_eq!(self, CURRENT_VERSION);
+                assert_eq!(self, get_version(app).await?);
+                Ok(())
+            }
+        }
+    }
+}
+impl Display for DbVersion {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        // It is a unit only enum, thus we can simply use the Debug formatting
+        <Self as std::fmt::Debug>::fmt(self, f)
+    }
+}
+
+/// Returns the current date as a UNIX time stamp (in milliseconds).
+fn get_current_date() -> i64 {
+    let start = SystemTime::now();
+    let seconds_since_epoch: TimeDelta = TimeDelta::from_std(
+        start
+            .duration_since(UNIX_EPOCH)
+            .expect("Time went backwards"),
+    )
+    .expect("Time does not go backwards");
+
+    // All database dates should be after the UNIX_EPOCH (and thus positive)
+    seconds_since_epoch.num_milliseconds()
+}
+
+/// Return the current database version.
+///
+/// # Panics
+/// Only if internal assertions fail.
+pub async fn get_version(app: &App) -> Result<DbVersion> {
+    get_version_db(&app.database).await
+}
+/// Return the current database version.
+///
+/// In contrast to the [`get_version`] function, this function does not need
+/// a fully instantiated [`App`]; a database connection suffices.
+///
+/// # Panics
+/// Only if internal assertions fail.
+pub async fn get_version_db(pool: &SqlitePool) -> Result<DbVersion> {
+    let version_table_exists = {
+        let query = query!(
+            "SELECT 1 as result FROM sqlite_master WHERE type = 'table' AND name = 'version'"
+        )
+        .fetch_optional(pool)
+        .await?;
+        if let Some(output) = query {
+            assert_eq!(output.result, 1);
+            true
+        } else {
+            false
+        }
+    };
+    if !version_table_exists {
+        return Ok(DbVersion::Empty);
+    }
+
+    let current_version = query!(
+        "
+        SELECT namespace, number FROM version WHERE valid_to IS NULL;
+        "
+    )
+    .fetch_one(pool)
+    .await
+    .context("Failed to fetch version number")?;
+
+    DbVersion::from_db(current_version.number, current_version.namespace.as_str())
+}
+
+pub async fn migrate_db(app: &App) -> Result<()> {
+    let current_version = get_version(app)
+        .await
+        .context("Failed to determine initial version")?;
+
+    if current_version == CURRENT_VERSION {
+        return Ok(());
+    }
+
+    info!("Migrate database from version '{current_version}' to version '{CURRENT_VERSION}'");
+
+    current_version.update(app).await?;
+
+    Ok(())
+}
diff --git a/crates/yt/src/storage/migrate/sql/0_Empty_to_Zero.sql b/crates/yt/src/storage/migrate/sql/0_Empty_to_Zero.sql
new file mode 100644
index 0000000..d703bfc
--- /dev/null
+++ b/crates/yt/src/storage/migrate/sql/0_Empty_to_Zero.sql
@@ -0,0 +1,72 @@
+-- yt - A fully featured command line YouTube client
+--
+-- Copyright (C) 2024 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+-- Copyright (C) 2025 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+-- SPDX-License-Identifier: GPL-3.0-or-later
+--
+-- This file is part of Yt.
+--
+-- You should have received a copy of the License along with this program.
+-- If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
+
+-- All tables should be declared STRICT, as I actually like to have type checking (and a
+-- db that doesn't lie to me).
+
+-- Keep this table in sync with the `DbVersion` enumeration.
+CREATE TABLE version (
+    -- The `namespace` is only useful if other tools ever build on this database
+    namespace   TEXT           NOT NULL,
+
+    -- The version.
+    number      INTEGER UNIQUE NOT NULL PRIMARY KEY,
+
+    -- The validity of this version as UNIX time stamp
+    valid_from  INTEGER        NOT NULL CHECK (valid_from < valid_to),
+    -- If set to `NULL`, represents the current version
+    valid_to    INTEGER UNIQUE          CHECK (valid_to > valid_from)
+) STRICT;
+
+-- Keep this table in sync with the `Video` structure
+CREATE TABLE videos (
+    cache_path                  TEXT UNIQUE                    CHECK (CASE WHEN cache_path IS NOT NULL THEN
+                                                                            status == 2
+                                                                      ELSE
+                                                                            1
+                                                                      END),
+    description                 TEXT,
+    duration                    REAL,
+    extractor_hash              TEXT UNIQUE NOT NULL PRIMARY KEY,
+    last_status_change          INTEGER     NOT NULL,
+    parent_subscription_name    TEXT,
+    priority                    INTEGER     NOT NULL DEFAULT 0,
+    publish_date                INTEGER,
+    status                      INTEGER     NOT NULL DEFAULT 0 CHECK (status IN (0, 1, 2, 3, 4, 5) AND
+                                                                      CASE WHEN status == 2 THEN
+                                                                           cache_path IS NOT NULL
+                                                                      ELSE
+                                                                           1
+                                                                      END AND
+                                                                      CASE WHEN status != 2 THEN
+                                                                           cache_path IS NULL
+                                                                      ELSE
+                                                                           1
+                                                                      END),
+    status_change               INTEGER     NOT NULL DEFAULT 0 CHECK (status_change IN (0, 1)),
+    thumbnail_url               TEXT,
+    title                       TEXT        NOT NULL,
+    url                         TEXT UNIQUE NOT NULL
+) STRICT;
+
+-- Store additional metadata for the videos marked to be watched
+CREATE TABLE video_options (
+    extractor_hash              TEXT UNIQUE NOT NULL PRIMARY KEY,
+    subtitle_langs              TEXT        NOT NULL,
+    playback_speed              REAL        NOT NULL,
+    FOREIGN KEY(extractor_hash) REFERENCES videos (extractor_hash)
+) STRICT;
+
+-- Store subscriptions
+CREATE TABLE subscriptions (
+    name              TEXT UNIQUE NOT NULL PRIMARY KEY,
+    url               TEXT        NOT NULL
+) STRICT;
diff --git a/crates/yt/src/storage/migrate/sql/1_Zero_to_One.sql b/crates/yt/src/storage/migrate/sql/1_Zero_to_One.sql
new file mode 100644
index 0000000..da9315b
--- /dev/null
+++ b/crates/yt/src/storage/migrate/sql/1_Zero_to_One.sql
@@ -0,0 +1,28 @@
+-- yt - A fully featured command line YouTube client
+--
+-- Copyright (C) 2025 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+-- SPDX-License-Identifier: GPL-3.0-or-later
+--
+-- This file is part of Yt.
+--
+-- You should have received a copy of the License along with this program.
+-- If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
+
+-- Is the video currently in a playlist?
+ALTER TABLE videos ADD in_playlist INTEGER NOT NULL DEFAULT 0 CHECK (in_playlist IN (0, 1));
+UPDATE videos SET in_playlist = 0;
+
+-- Is it 'focused' (i.e., the selected video)?
+-- Only one video should be focused at a time.
+ALTER TABLE videos
+ADD COLUMN is_focused INTEGER NOT NULL DEFAULT 0
+CHECK (is_focused IN (0, 1));
+UPDATE videos SET is_focused = 0;
+
+-- The progress the user made in watching the video.
+ALTER TABLE videos ADD watch_progress INTEGER NOT NULL DEFAULT 0 CHECK (watch_progress <= duration);
+-- Assume that the user has watched the video to the end if it is marked as watched
+UPDATE videos SET watch_progress = ifnull(duration, 0) WHERE status = 3;
+UPDATE videos SET watch_progress = 0 WHERE status != 3;
+
+ALTER TABLE videos DROP COLUMN status_change;
diff --git a/crates/yt/src/storage/migrate/sql/2_One_to_Two.sql b/crates/yt/src/storage/migrate/sql/2_One_to_Two.sql
new file mode 100644
index 0000000..806de07
--- /dev/null
+++ b/crates/yt/src/storage/migrate/sql/2_One_to_Two.sql
@@ -0,0 +1,11 @@
+-- yt - A fully featured command line YouTube client
+--
+-- Copyright (C) 2025 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+-- SPDX-License-Identifier: GPL-3.0-or-later
+--
+-- This file is part of Yt.
+--
+-- You should have received a copy of the License along with this program.
+-- If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
+
+ALTER TABLE videos DROP in_playlist;
diff --git a/crates/yt/src/storage/migrate/sql/3_Two_to_Three.sql b/crates/yt/src/storage/migrate/sql/3_Two_to_Three.sql
new file mode 100644
index 0000000..b33f849
--- /dev/null
+++ b/crates/yt/src/storage/migrate/sql/3_Two_to_Three.sql
@@ -0,0 +1,85 @@
+-- yt - A fully featured command line YouTube client
+--
+-- Copyright (C) 2025 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+-- SPDX-License-Identifier: GPL-3.0-or-later
+--
+-- This file is part of Yt.
+--
+-- You should have received a copy of the License along with this program.
+-- If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
+
+
+-- 1. Create new table
+-- 2. Copy data
+-- 3. Drop old table
+-- 4. Rename new into old
+
+-- remove the original TRANSACTION
+COMMIT TRANSACTION;
+
+-- tweak config
+PRAGMA foreign_keys=OFF;
+
+-- start your own TRANSACTION
+BEGIN TRANSACTION;
+
+CREATE TABLE videos_new (
+    cache_path                  TEXT    UNIQUE                       CHECK (CASE
+                                                                              WHEN cache_path IS NOT NULL THEN status == 2
+                                                                              ELSE 1
+                                                                            END),
+    description                 TEXT,
+    duration                    REAL,
+    extractor_hash              TEXT    UNIQUE NOT NULL PRIMARY KEY,
+    last_status_change          INTEGER        NOT NULL,
+    parent_subscription_name    TEXT,
+    priority                    INTEGER        NOT NULL DEFAULT 0,
+    publish_date                INTEGER,
+    status                      INTEGER        NOT NULL DEFAULT 0    CHECK (status IN (0, 1, 2, 3, 4, 5) AND
+                                                                            CASE
+                                                                              WHEN status == 2 THEN cache_path IS NOT NULL
+                                                                              WHEN status != 2 THEN cache_path IS NULL
+                                                                              ELSE 1
+                                                                            END),
+    thumbnail_url               TEXT,
+    title                       TEXT           NOT NULL,
+    url                         TEXT    UNIQUE NOT NULL,
+    is_focused                  INTEGER UNIQUE          DEFAULT NULL CHECK (CASE
+                                                                              WHEN is_focused IS NOT NULL THEN is_focused == 1
+                                                                              ELSE 1
+                                                                            END),
+    watch_progress              INTEGER        NOT NULL DEFAULT 0    CHECK (watch_progress <= duration)
+) STRICT;
+
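+-- The single-row `dummy` subquery supplies NULL for the new `is_focused` column,
+-- so every copied video starts out unfocused.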
+INSERT INTO videos_new SELECT
+    videos.cache_path,
+    videos.description,
+    videos.duration,
+    videos.extractor_hash,
+    videos.last_status_change,
+    videos.parent_subscription_name,
+    videos.priority,
+    videos.publish_date,
+    videos.status,
+    videos.thumbnail_url,
+    videos.title,
+    videos.url,
+    dummy.is_focused,
+    videos.watch_progress
+FROM videos, (SELECT NULL AS is_focused) AS dummy;
+
+DROP TABLE videos;
+
+ALTER TABLE videos_new RENAME TO videos;
+
+-- check that the foreign key constraints still hold.
+PRAGMA foreign_key_check;
+
+-- commit your own TRANSACTION
+COMMIT TRANSACTION;
+
+-- roll back all config you set up before.
+PRAGMA foreign_keys=ON;
+
+-- start a new TRANSACTION to let the migrator commit it.
+BEGIN TRANSACTION;
diff --git a/crates/yt_dlp/src/wrapper/mod.rs b/crates/yt/src/storage/mod.rs
index 3fe3247..d352b41 100644
--- a/crates/yt_dlp/src/wrapper/mod.rs
+++ b/crates/yt/src/storage/mod.rs
@@ -1,6 +1,7 @@
 // yt - A fully featured command line YouTube client
 //
 // Copyright (C) 2024 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+// Copyright (C) 2025 Benedikt Peetz <benedikt.peetz@b-peetz.de>
 // SPDX-License-Identifier: GPL-3.0-or-later
 //
 // This file is part of Yt.
@@ -8,5 +9,6 @@
 // You should have received a copy of the License along with this program.
 // If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
 
-pub mod info_json;
-// pub mod yt_dlp_options;
+pub mod migrate;
+pub mod subscriptions;
+pub mod video_database;
diff --git a/crates/yt/src/storage/subscriptions.rs b/crates/yt/src/storage/subscriptions.rs
new file mode 100644
index 0000000..0e8ae85
--- /dev/null
+++ b/crates/yt/src/storage/subscriptions.rs
@@ -0,0 +1,141 @@
+// yt - A fully featured command line YouTube client
+//
+// Copyright (C) 2024 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+// Copyright (C) 2025 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+// SPDX-License-Identifier: GPL-3.0-or-later
+//
+// This file is part of Yt.
+//
+// You should have received a copy of the License along with this program.
+// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
+
+//! Handle subscriptions
+
+use std::collections::HashMap;
+
+use anyhow::Result;
+use log::debug;
+use sqlx::query;
+use url::Url;
+use yt_dlp::{json_cast, options::YoutubeDLOptions};
+
+use crate::{app::App, unreachable::Unreachable};
+
+#[derive(Clone, Debug)]
+pub struct Subscription {
+    /// The human readable name of this subscription
+    pub name: String,
+
+    /// The URL this subscription subscribes to
+    pub url: Url,
+}
+
+impl Subscription {
+    #[must_use]
+    pub fn new(name: String, url: Url) -> Self {
+        Self { name, url }
+    }
+}
+
+/// Check whether a URL could be used as a subscription URL
+pub async fn check_url(url: Url) -> Result<bool> {
+    let yt_dlp = YoutubeDLOptions::new()
+        .set("playliststart", 1)
+        .set("playlistend", 10)
+        .set("noplaylist", false)
+        .set("extract_flat", "in_playlist")
+        .build()?;
+
+    let info = yt_dlp.extract_info(&url, false, false)?;
+
+    debug!("{info:#?}");
+
+    Ok(info.get("_type").map(|v| json_cast!(v, as_str)) == Some("playlist"))
+}
+
+#[derive(Default, Debug)]
+pub struct Subscriptions(pub(crate) HashMap<String, Subscription>);
+
+/// Remove all subscriptions
+pub async fn remove_all(app: &App) -> Result<()> {
+    query!(
+        "
+        DELETE FROM subscriptions;
+    ",
+    )
+    .execute(&app.database)
+    .await?;
+
+    Ok(())
+}
+
+/// Get a list of subscriptions
+pub async fn get(app: &App) -> Result<Subscriptions> {
+    let raw_subs = query!(
+        "
+        SELECT *
+        FROM subscriptions;
+    "
+    )
+    .fetch_all(&app.database)
+    .await?;
+
+    let subscriptions: HashMap<String, Subscription> = raw_subs
+        .into_iter()
+        .map(|sub| {
+            (
+                sub.name.clone(),
+                Subscription::new(
+                    sub.name,
+                    Url::parse(&sub.url).unreachable("It was a URL when we inserted it."),
+                ),
+            )
+        })
+        .collect();
+
+    Ok(Subscriptions(subscriptions))
+}
+
+pub async fn add_subscription(app: &App, sub: &Subscription) -> Result<()> {
+    let url = sub.url.to_string();
+
+    query!(
+        "
+        INSERT INTO subscriptions (
+            name,
+            url
+        ) VALUES (?, ?);
+    ",
+        sub.name,
+        url
+    )
+    .execute(&app.database)
+    .await?;
+
+    println!("Subscribed to '{}' at '{}'", sub.name, sub.url);
+    Ok(())
+}
+
+/// # Panics
+/// Only if assertions fail
+pub async fn remove_subscription(app: &App, sub: &Subscription) -> Result<()> {
+    let output = query!(
+        "
+        DELETE FROM subscriptions
+        WHERE name = ?
+    ",
+        sub.name,
+    )
+    .execute(&app.database)
+    .await?;
+
+    assert_eq!(
+        output.rows_affected(),
+        1,
+        "The remove subscriptino query did effect more (or less) than one row. This is a bug."
+    );
+
+    println!("Unsubscribed from '{}' at '{}'", sub.name, sub.url);
+
+    Ok(())
+}
diff --git a/crates/yt/src/storage/video_database/downloader.rs b/crates/yt/src/storage/video_database/downloader.rs
new file mode 100644
index 0000000..a95081e
--- /dev/null
+++ b/crates/yt/src/storage/video_database/downloader.rs
@@ -0,0 +1,130 @@
+// yt - A fully featured command line YouTube client
+//
+// Copyright (C) 2024 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+// Copyright (C) 2025 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+// SPDX-License-Identifier: GPL-3.0-or-later
+//
+// This file is part of Yt.
+//
+// You should have received a copy of the License along with this program.
+// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
+
+use std::path::{Path, PathBuf};
+
+use anyhow::Result;
+use log::debug;
+use sqlx::query;
+
+use crate::{
+    app::App,
+    storage::video_database::{VideoStatus, VideoStatusMarker},
+    unreachable::Unreachable,
+    video_from_record,
+};
+
+use super::{ExtractorHash, Video};
+
+/// Returns the next video which should be downloaded. This respects the priority assigned by select.
+/// It does not return videos which are already cached.
+///
+/// # Panics
+/// Only if assertions fail.
+pub async fn get_next_uncached_video(app: &App) -> Result<Option<Video>> {
+    let status = VideoStatus::Watch.as_marker().as_db_integer();
+
+    // NOTE: The ORDER BY statement should be the same as the one in [`get::videos`].<2024-08-22>
+    let result = query!(
+        r#"
+        SELECT *
+        FROM videos
+        WHERE status = ? AND cache_path IS NULL
+        ORDER BY priority DESC, publish_date DESC
+        LIMIT 1;
+    "#,
+        status
+    )
+    .fetch_one(&app.database)
+    .await;
+
+    if let Err(sqlx::Error::RowNotFound) = result {
+        Ok(None)
+    } else {
+        let base = result?;
+
+        Ok(Some(video_from_record! {base}))
+    }
+}
+
+/// Update the cache path of a video. It will be set to NULL if the path is `None`.
+/// This will also set the status to `Cached` when the path is `Some`; otherwise it sets the
+/// status to `Watch`.
+pub async fn set_video_cache_path(
+    app: &App,
+    video: &ExtractorHash,
+    path: Option<&Path>,
+) -> Result<()> {
+    if let Some(path) = path {
+        debug!(
+            "Setting cache path from '{}' to '{}'",
+            video.into_short_hash(app).await?,
+            path.display()
+        );
+
+        let path_str = path.display().to_string();
+        let extractor_hash = video.hash().to_string();
+        let status = VideoStatusMarker::Cached.as_db_integer();
+
+        query!(
+            r#"
+            UPDATE videos
+            SET cache_path = ?, status = ?
+            WHERE extractor_hash = ?;
+        "#,
+            path_str,
+            status,
+            extractor_hash
+        )
+        .execute(&app.database)
+        .await?;
+
+        Ok(())
+    } else {
+        debug!(
+            "Setting cache path from '{}' to NULL",
+            video.into_short_hash(app).await?,
+        );
+
+        let extractor_hash = video.hash().to_string();
+        let status = VideoStatus::Watch.as_marker().as_db_integer();
+
+        query!(
+            r#"
+            UPDATE videos
+            SET cache_path = NULL, status = ?
+            WHERE extractor_hash = ?;
+        "#,
+            status,
+            extractor_hash
+        )
+        .execute(&app.database)
+        .await?;
+
+        Ok(())
+    }
+}
+
+/// Returns the number of cached videos
+pub async fn get_allocated_cache(app: &App) -> Result<u32> {
+    let count = query!(
+        r#"
+        SELECT COUNT(cache_path) as count
+        FROM videos
+        WHERE cache_path IS NOT NULL;
+"#,
+    )
+    .fetch_one(&app.database)
+    .await?;
+
+    Ok(u32::try_from(count.count)
+        .unreachable("The value should be strictly positive (and bolow `u32::Max`)"))
+}
diff --git a/crates/yt/src/storage/video_database/extractor_hash.rs b/crates/yt/src/storage/video_database/extractor_hash.rs
new file mode 100644
index 0000000..df545d7
--- /dev/null
+++ b/crates/yt/src/storage/video_database/extractor_hash.rs
@@ -0,0 +1,163 @@
+// yt - A fully featured command line YouTube client
+//
+// Copyright (C) 2024 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+// Copyright (C) 2025 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+// SPDX-License-Identifier: GPL-3.0-or-later
+//
+// This file is part of Yt.
+//
+// You should have received a copy of the License along with this program.
+// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
+
+use std::{collections::HashSet, fmt::Display, str::FromStr};
+
+use anyhow::{Context, Result, bail};
+use blake3::Hash;
+use log::debug;
+use tokio::sync::OnceCell;
+
+use crate::{app::App, storage::video_database::get::get_all_hashes, unreachable::Unreachable};
+
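+// Caches the number of characters needed to disambiguate all extractor hashes,
+// so it only has to be computed once per run (see `get_needed_char_len`).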
+static EXTRACTOR_HASH_LENGTH: OnceCell<usize> = OnceCell::const_new();
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash, Copy)]
+pub struct ExtractorHash {
+    hash: Hash,
+}
+
+impl Display for ExtractorHash {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        self.hash.fmt(f)
+    }
+}
+
+#[derive(Debug, Clone)]
+pub struct ShortHash(String);
+
+impl Display for ShortHash {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        self.0.fmt(f)
+    }
+}
+
+#[derive(Debug, Clone)]
+#[allow(clippy::module_name_repetitions)]
+pub struct LazyExtractorHash {
+    value: ShortHash,
+}
+
+impl FromStr for LazyExtractorHash {
+    type Err = anyhow::Error;
+
+    fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
+        // perform some cheap validation
+        if s.len() > 64 {
+            bail!("A hash can only contain 64 bytes!");
+        }
+
+        Ok(Self {
+            value: ShortHash(s.to_owned()),
+        })
+    }
+}
+
+impl LazyExtractorHash {
+    /// Turn the [`LazyExtractorHash`] into the [`ExtractorHash`]
+    pub async fn realize(self, app: &App) -> Result<ExtractorHash> {
+        ExtractorHash::from_short_hash(app, &self.value).await
+    }
+}
+
+impl ExtractorHash {
+    #[must_use]
+    pub fn from_hash(hash: Hash) -> Self {
+        Self { hash }
+    }
+    pub async fn from_short_hash(app: &App, s: &ShortHash) -> Result<Self> {
+        Ok(Self {
+            hash: Self::short_hash_to_full_hash(app, s).await?,
+        })
+    }
+
+    #[must_use]
+    pub fn hash(&self) -> &Hash {
+        &self.hash
+    }
+
+    pub async fn into_short_hash(&self, app: &App) -> Result<ShortHash> {
+        let needed_chars = if let Some(needed_chars) = EXTRACTOR_HASH_LENGTH.get() {
+            *needed_chars
+        } else {
+            let needed_chars = self
+                .get_needed_char_len(app)
+                .await
+                .context("Failed to calculate needed char length")?;
+            EXTRACTOR_HASH_LENGTH.set(needed_chars).unreachable(
+                "This should work at this stage, as we checked above that it is empty.",
+            );
+
+            needed_chars
+        };
+
+        Ok(ShortHash(
+            self.hash()
+                .to_hex()
+                .chars()
+                .take(needed_chars)
+                .collect::<String>(),
+        ))
+    }
+
+    async fn short_hash_to_full_hash(app: &App, s: &ShortHash) -> Result<Hash> {
+        let all_hashes = get_all_hashes(app)
+            .await
+            .context("Failed to fetch all extractor -hashesh from database")?;
+
+        let needed_chars = s.0.len();
+
+        for hash in all_hashes {
+            if hash.to_hex()[..needed_chars] == s.0 {
+                return Ok(hash);
+            }
+        }
+
+        bail!("Your shortend hash, does not match a real hash (this is probably a bug)!");
+    }
+
+    async fn get_needed_char_len(&self, app: &App) -> Result<usize> {
+        debug!("Calculating the needed hash char length");
+        let all_hashes = get_all_hashes(app)
+            .await
+            .context("Failed to fetch all extractor -hashesh from database")?;
+
+        let all_char_vec_hashes = all_hashes
+            .into_iter()
+            .map(|hash| hash.to_hex().chars().collect::<Vec<char>>())
+            .collect::<Vec<Vec<_>>>();
+
+        // This value should be updated later; if it is not, the assertion below will fail.
+        let mut needed_chars: usize = 1000;
+        'outer: for i in 1..64 {
+            let i_chars: Vec<String> = all_char_vec_hashes
+                .iter()
+                .map(|vec| vec.iter().take(i).collect::<String>())
+                .collect();
+
+            let mut uniqnes_hashmap: HashSet<String> = HashSet::new();
+            for ch in i_chars {
+                if !uniqnes_hashmap.insert(ch) {
+                    // The prefix was already in the set, thus it is not unique and we need
+                    // at least one more char
+                    continue 'outer;
+                }
+            }
+
+            needed_chars = i;
+            break 'outer;
+        }
+
+        assert!(needed_chars <= 64, "Hashes are only 64 hex characters long");
+
+        Ok(needed_chars)
+    }
+}
diff --git a/crates/yt/src/storage/video_database/get/mod.rs b/crates/yt/src/storage/video_database/get/mod.rs
new file mode 100644
index 0000000..e76131e
--- /dev/null
+++ b/crates/yt/src/storage/video_database/get/mod.rs
@@ -0,0 +1,307 @@
+// yt - A fully featured command line YouTube client
+//
+// Copyright (C) 2024 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+// Copyright (C) 2025 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+// SPDX-License-Identifier: GPL-3.0-or-later
+//
+// This file is part of Yt.
+//
+// You should have received a copy of the License along with this program.
+// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
+
+//! These functions interact with the storage db in a read-only way. They are added on demand (as
+//! you could theoretically do everything with the `videos` function), as performance or
+//! convenience requires.
+use std::{fs::File, path::PathBuf};
+
+use anyhow::{Context, Result, bail};
+use blake3::Hash;
+use log::{debug, trace};
+use sqlx::query;
+use yt_dlp::info_json::InfoJson;
+
+use crate::{
+    app::App,
+    storage::{
+        subscriptions::Subscription,
+        video_database::{Video, extractor_hash::ExtractorHash},
+    },
+    unreachable::Unreachable,
+};
+
+use super::{MpvOptions, VideoOptions, VideoStatus, VideoStatusMarker, YtDlpOptions};
+
+mod playlist;
+pub use playlist::*;
+
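+/// Construct a [`Video`] from a raw database record of the `videos` table.
+///
+/// The record is expected to provide all columns of the table, including
+/// `is_focused` and `watch_progress`.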
+#[macro_export]
+macro_rules! video_from_record {
+    ($record:expr) => {
+        Video {
+            description: $record.description.clone(),
+            duration: $crate::storage::video_database::MaybeDuration::from_maybe_secs_f64(
+                $record.duration,
+            ),
+            extractor_hash:
+                $crate::storage::video_database::extractor_hash::ExtractorHash::from_hash(
+                    $record
+                        .extractor_hash
+                        .parse()
+                        .expect("The db hash should be a valid blake3 hash"),
+                ),
+            last_status_change: $crate::storage::video_database::TimeStamp::from_secs(
+                $record.last_status_change,
+            ),
+            parent_subscription_name: $record.parent_subscription_name.clone(),
+            publish_date: $record
+                .publish_date
+                .map(|pd| $crate::storage::video_database::TimeStamp::from_secs(pd)),
+            status: {
+                let marker = $crate::storage::video_database::VideoStatusMarker::from_db_integer(
+                    $record.status,
+                );
+
+                let optional = if let Some(cache_path) = &$record.cache_path {
+                    Some((
+                        PathBuf::from(cache_path),
+                        $record.is_focused == Some(1),
+                    ))
+                } else {
+                    None
+                };
+
+                $crate::storage::video_database::VideoStatus::from_marker(marker, optional)
+            },
+            thumbnail_url: if let Some(url) = &$record.thumbnail_url {
+                Some(url::Url::parse(&url).expect("Parsing this as url should always work"))
+            } else {
+                None
+            },
+            title: $record.title.clone(),
+            url: url::Url::parse(&$record.url).expect("Parsing this as url should always work"),
+            priority: $crate::storage::video_database::Priority::from($record.priority),
+
+            watch_progress: std::time::Duration::from_secs(
+                u64::try_from($record.watch_progress).expect("The record is positive i64"),
+            ),
+        }
+    };
+}
+
+/// Returns the videos that are in the `allowed_states`.
+///
+/// # Panics
+/// Only, if assertions fail.
+pub async fn videos(app: &App, allowed_states: &[VideoStatusMarker]) -> Result<Vec<Video>> {
+    fn test(all_states: &[VideoStatusMarker], check: VideoStatusMarker) -> Option<i64> {
+        if all_states.contains(&check) {
+            trace!("State '{check:?}' marked as active");
+            Some(check.as_db_integer())
+        } else {
+            trace!("State '{check:?}' marked as inactive");
+            None
+        }
+    }
+    fn states_to_string(allowed_states: &[VideoStatusMarker]) -> String {
+        let mut states = allowed_states
+            .iter()
+            .fold(String::from("&["), |mut acc, state| {
+                acc.push_str(state.as_str());
+                acc.push_str(", ");
+                acc
+            });
+        states = states.trim().to_owned();
+        states = states.trim_end_matches(',').to_owned();
+        states.push(']');
+        states
+    }
+
+    debug!(
+        "Fetching videos in the states: '{}'",
+        states_to_string(allowed_states)
+    );
+    let active_pick: Option<i64> = test(allowed_states, VideoStatusMarker::Pick);
+    let active_watch: Option<i64> = test(allowed_states, VideoStatusMarker::Watch);
+    let active_cached: Option<i64> = test(allowed_states, VideoStatusMarker::Cached);
+    let active_watched: Option<i64> = test(allowed_states, VideoStatusMarker::Watched);
+    let active_drop: Option<i64> = test(allowed_states, VideoStatusMarker::Drop);
+    let active_dropped: Option<i64> = test(allowed_states, VideoStatusMarker::Dropped);
+
+    let videos = query!(
+        r"
+        SELECT *
+          FROM videos
+          WHERE status IN (?,?,?,?,?,?)
+          ORDER BY priority DESC, publish_date DESC;
+          ",
+        active_pick,
+        active_watch,
+        active_cached,
+        active_watched,
+        active_drop,
+        active_dropped,
+    )
+    .fetch_all(&app.database)
+    .await
+    .with_context(|| {
+        format!(
+            "Failed to query videos with states: '{}'",
+            states_to_string(allowed_states)
+        )
+    })?;
+
+    let real_videos: Vec<Video> = videos
+        .iter()
+        .map(|base| -> Video {
+            video_from_record! {base}
+        })
+        .collect();
+
+    Ok(real_videos)
+}
+
+pub fn video_info_json(video: &Video) -> Result<Option<InfoJson>> {
+    if let VideoStatus::Cached { mut cache_path, .. } = video.status.clone() {
+        if !cache_path.set_extension("info.json") {
+            bail!(
+                "Failed to change path extension to 'info.json': {}",
+                cache_path.display()
+            );
+        }
+        let info_json_file = File::open(cache_path)?;
+        let info_json: InfoJson = serde_json::from_reader(&info_json_file)?;
+
+        Ok(Some(info_json))
+    } else {
+        Ok(None)
+    }
+}
+
+pub async fn video_by_hash(app: &App, hash: &ExtractorHash) -> Result<Video> {
+    let ehash = hash.hash().to_string();
+
+    let raw_video = query!(
+        "
+        SELECT * FROM videos WHERE extractor_hash = ?;
+        ",
+        ehash
+    )
+    .fetch_one(&app.database)
+    .await?;
+
+    Ok(video_from_record! {raw_video})
+}
+
+pub async fn get_all_hashes(app: &App) -> Result<Vec<Hash>> {
+    let hashes_hex = query!(
+        r#"
+        SELECT extractor_hash
+        FROM videos;
+    "#
+    )
+    .fetch_all(&app.database)
+    .await?;
+
+    Ok(hashes_hex
+        .iter()
+        .map(|hash| {
+            Hash::from_hex(&hash.extractor_hash).unreachable(
+                "These values started as blake3 hashes, they should stay blake3 hashes",
+            )
+        })
+        .collect())
+}
+
+pub async fn get_video_hashes(app: &App, subs: &Subscription) -> Result<Vec<Hash>> {
+    let hashes_hex = query!(
+        r#"
+        SELECT extractor_hash
+        FROM videos
+        WHERE parent_subscription_name = ?;
+    "#,
+        subs.name
+    )
+    .fetch_all(&app.database)
+    .await?;
+
+    Ok(hashes_hex
+        .iter()
+        .map(|hash| {
+            Hash::from_hex(&hash.extractor_hash).unreachable(
+                "These values started as blake3 hashes, they should stay blake3 hashes",
+            )
+        })
+        .collect())
+}
+
+pub async fn get_video_yt_dlp_opts(app: &App, hash: &ExtractorHash) -> Result<YtDlpOptions> {
+    let ehash = hash.hash().to_string();
+
+    let yt_dlp_options = query!(
+        r#"
+        SELECT subtitle_langs
+        FROM video_options
+        WHERE extractor_hash = ?;
+    "#,
+        ehash
+    )
+    .fetch_one(&app.database)
+    .await
+    .with_context(|| {
+        format!("Failed to fetch the `yt_dlp_video_opts` for video with hash: '{hash}'",)
+    })?;
+
+    Ok(YtDlpOptions {
+        subtitle_langs: yt_dlp_options.subtitle_langs,
+    })
+}
+pub async fn video_mpv_opts(app: &App, hash: &ExtractorHash) -> Result<MpvOptions> {
+    let ehash = hash.hash().to_string();
+
+    let mpv_options = query!(
+        r#"
+        SELECT playback_speed
+        FROM video_options
+        WHERE extractor_hash = ?;
+    "#,
+        ehash
+    )
+    .fetch_one(&app.database)
+    .await
+    .with_context(|| {
+        format!("Failed to fetch the `mpv_video_opts` for video with hash: '{hash}'")
+    })?;
+
+    Ok(MpvOptions {
+        playback_speed: mpv_options.playback_speed,
+    })
+}
+
+pub async fn get_video_opts(app: &App, hash: &ExtractorHash) -> Result<VideoOptions> {
+    let ehash = hash.hash().to_string();
+
+    let opts = query!(
+        r#"
+        SELECT playback_speed, subtitle_langs
+        FROM video_options
+        WHERE extractor_hash = ?;
+    "#,
+        ehash
+    )
+    .fetch_one(&app.database)
+    .await
+    .with_context(|| format!("Failed to fetch the `video_opts` for video with hash: '{hash}'"))?;
+
+    let mpv = MpvOptions {
+        playback_speed: opts.playback_speed,
+    };
+    let yt_dlp = YtDlpOptions {
+        subtitle_langs: opts.subtitle_langs,
+    };
+
+    Ok(VideoOptions { yt_dlp, mpv })
+}
diff --git a/crates/yt/src/storage/video_database/get/playlist/iterator.rs b/crates/yt/src/storage/video_database/get/playlist/iterator.rs
new file mode 100644
index 0000000..4c45bf7
--- /dev/null
+++ b/crates/yt/src/storage/video_database/get/playlist/iterator.rs
@@ -0,0 +1,101 @@
+// yt - A fully featured command line YouTube client
+//
+// Copyright (C) 2025 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+// SPDX-License-Identifier: GPL-3.0-or-later
+//
+// This file is part of Yt.
+//
+// You should have received a copy of the License along with this program.
+// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
+
+use std::{
+    collections::VecDeque,
+    path::{Path, PathBuf},
+};
+
+use crate::storage::video_database::{Video, VideoStatus};
+
+use super::Playlist;
+
+/// Turn a cached video into its `cache_path`
+fn to_cache_video(video: Video) -> PathBuf {
+    if let VideoStatus::Cached { cache_path, .. } = video.status {
+        cache_path
+    } else {
+        unreachable!("ALl of these videos should be cached.")
+    }
+}
+
+#[derive(Debug)]
+pub struct PlaylistIterator {
+    paths: VecDeque<PathBuf>,
+}
+
+impl Iterator for PlaylistIterator {
+    type Item = <Playlist as IntoIterator>::Item;
+
+    fn next(&mut self) -> Option<Self::Item> {
+        self.paths.pop_front()
+    }
+}
+
+impl DoubleEndedIterator for PlaylistIterator {
+    fn next_back(&mut self) -> Option<Self::Item> {
+        self.paths.pop_back()
+    }
+}
+
+impl IntoIterator for Playlist {
+    type Item = PathBuf;
+
+    type IntoIter = PlaylistIterator;
+
+    fn into_iter(self) -> Self::IntoIter {
+        let paths = self.videos.into_iter().map(to_cache_video).collect();
+        Self::IntoIter { paths }
+    }
+}
+
+#[derive(Debug)]
+pub struct PlaylistIteratorBorrowed<'a> {
+    paths: Vec<&'a Path>,
+    index: usize,
+}
+
+impl<'a> Iterator for PlaylistIteratorBorrowed<'a> {
+    type Item = <&'a Playlist as IntoIterator>::Item;
+
+    fn next(&mut self) -> Option<Self::Item> {
+        let output = self.paths.get(self.index);
+        self.index += 1;
+        output.map(|v| &**v)
+    }
+}
+
+impl<'a> Playlist {
+    #[must_use]
+    pub fn iter(&'a self) -> PlaylistIteratorBorrowed<'a> {
+        <&Self as IntoIterator>::into_iter(self)
+    }
+}
+
+impl<'a> IntoIterator for &'a Playlist {
+    type Item = &'a Path;
+
+    type IntoIter = PlaylistIteratorBorrowed<'a>;
+
+    fn into_iter(self) -> Self::IntoIter {
+        let paths = self
+            .videos
+            .iter()
+            .map(|vid| {
+                if let VideoStatus::Cached { cache_path, .. } = &vid.status {
+                    cache_path.as_path()
+                } else {
+                    unreachable!("ALl of these videos should be cached.")
+                }
+            })
+            .collect();
+        Self::IntoIter { paths, index: 0 }
+    }
+}
diff --git a/crates/yt/src/storage/video_database/get/playlist/mod.rs b/crates/yt/src/storage/video_database/get/playlist/mod.rs
new file mode 100644
index 0000000..f6aadbf
--- /dev/null
+++ b/crates/yt/src/storage/video_database/get/playlist/mod.rs
@@ -0,0 +1,167 @@
+// yt - A fully featured command line YouTube client
+//
+// Copyright (C) 2025 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+// SPDX-License-Identifier: GPL-3.0-or-later
+//
+// This file is part of Yt.
+//
+// You should have received a copy of the License along with this program.
+// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
+
+//! This file contains the getters for the internal playlist
+
+use std::{ops::Add, path::PathBuf};
+
+use crate::{
+    app::App,
+    storage::video_database::{Video, VideoStatusMarker, extractor_hash::ExtractorHash},
+    video_from_record,
+};
+
+use anyhow::Result;
+use sqlx::query;
+
+pub mod iterator;
+
+/// Zero-based index into the internal playlist.
+#[derive(Debug, Clone, Copy)]
+pub struct PlaylistIndex(usize);
+
+impl From<PlaylistIndex> for usize {
+    fn from(value: PlaylistIndex) -> Self {
+        value.0
+    }
+}
+
+impl From<usize> for PlaylistIndex {
+    fn from(value: usize) -> Self {
+        Self(value)
+    }
+}
+
+impl Add<usize> for PlaylistIndex {
+    type Output = Self;
+
+    fn add(self, rhs: usize) -> Self::Output {
+        Self(self.0 + rhs)
+    }
+}
+
+impl Add for PlaylistIndex {
+    type Output = Self;
+
+    fn add(self, rhs: Self) -> Self::Output {
+        Self(self.0 + rhs.0)
+    }
+}
+
+/// A representation of the internal Playlist
+#[derive(Debug)]
+pub struct Playlist {
+    videos: Vec<Video>,
+}
+
+impl Playlist {
+    /// Return the videos of this playlist.
+    #[must_use]
+    pub fn as_videos(&self) -> &[Video] {
+        &self.videos
+    }
+
+    /// Turn this playlist into its videos
+    #[must_use]
+    pub fn to_videos(self) -> Vec<Video> {
+        self.videos
+    }
+
+    /// Find the index of the video specified by the `video_hash`.
+    ///
+    /// # Panics
+    /// Only if internal assertions fail.
+    #[must_use]
+    pub fn find_index(&self, video_hash: &ExtractorHash) -> Option<PlaylistIndex> {
+        if let Some((index, value)) = self
+            .videos
+            .iter()
+            .enumerate()
+            .find(|(_, other)| other.extractor_hash == *video_hash)
+        {
+            assert_eq!(value.extractor_hash, *video_hash);
+            Some(PlaylistIndex(index))
+        } else {
+            None
+        }
+    }
+
+    /// Select a video based on its index
+    #[must_use]
+    pub fn get(&self, index: PlaylistIndex) -> Option<&Video> {
+        self.videos.get(index.0)
+    }
+
+    /// Returns the number of videos in the playlist
+    #[must_use]
+    pub fn len(&self) -> usize {
+        self.videos.len()
+    }
+    /// Is the playlist empty?
+    #[must_use]
+    pub fn is_empty(&self) -> bool {
+        self.videos.is_empty()
+    }
+}
+
+/// Return the current playlist index.
+///
+/// This effectively looks for the currently focused video and returns its index.
+///
+/// # Panics
+/// Only if internal assertions fail.
+pub async fn current_playlist_index(app: &App) -> Result<Option<PlaylistIndex>> {
+    if let Some(focused) = currently_focused_video(app).await? {
+        let playlist = playlist(app).await?;
+        let index = playlist
+            .find_index(&focused.extractor_hash)
+            .expect("All focused videos must also be in the playlist");
+        Ok(Some(index))
+    } else {
+        Ok(None)
+    }
+}
+
+/// Return the video in the playlist at the position `index`.
+pub async fn playlist_entry(app: &App, index: PlaylistIndex) -> Result<Option<Video>> {
+    let playlist = playlist(app).await?;
+
+    if let Some(vid) = playlist.get(index) {
+        Ok(Some(vid.to_owned()))
+    } else {
+        Ok(None)
+    }
+}
+
+pub async fn playlist(app: &App) -> Result<Playlist> {
+    let videos = super::videos(app, &[VideoStatusMarker::Cached]).await?;
+
+    Ok(Playlist { videos })
+}
+
+/// This returns the video with the `is_focused` flag set.
+/// # Panics
+/// Only if assertions fail.
+pub async fn currently_focused_video(app: &App) -> Result<Option<Video>> {
+    let cached_status = VideoStatusMarker::Cached.as_db_integer();
+    let record = query!(
+        "SELECT * FROM videos WHERE is_focused = 1 AND status = ?",
+        cached_status
+    )
+    .fetch_one(&app.database)
+    .await;
+
+    if let Err(sqlx::Error::RowNotFound) = record {
+        Ok(None)
+    } else {
+        let base = record?;
+        Ok(Some(video_from_record! {base}))
+    }
+}
diff --git a/crates/yt/src/storage/video_database/mod.rs b/crates/yt/src/storage/video_database/mod.rs
new file mode 100644
index 0000000..74d09f0
--- /dev/null
+++ b/crates/yt/src/storage/video_database/mod.rs
@@ -0,0 +1,329 @@
+// yt - A fully featured command line YouTube client
+//
+// Copyright (C) 2024 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+// Copyright (C) 2025 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+// SPDX-License-Identifier: GPL-3.0-or-later
+//
+// This file is part of Yt.
+//
+// You should have received a copy of the License along with this program.
+// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
+
+use std::{
+    fmt::{Display, Write},
+    path::PathBuf,
+    time::Duration,
+};
+
+use chrono::{DateTime, Utc};
+use url::Url;
+
+use crate::{
+    app::App, select::selection_file::duration::MaybeDuration,
+    storage::video_database::extractor_hash::ExtractorHash,
+};
+
+pub mod downloader;
+pub mod extractor_hash;
+pub mod get;
+pub mod notify;
+pub mod set;
+
+#[derive(Debug, Clone)]
+pub struct Video {
+    pub description: Option<String>,
+    pub duration: MaybeDuration,
+    pub extractor_hash: ExtractorHash,
+    pub last_status_change: TimeStamp,
+
+    /// The associated subscription this video was fetched from (`None` when the video was `add`ed)
+    pub parent_subscription_name: Option<String>,
+    pub priority: Priority,
+    pub publish_date: Option<TimeStamp>,
+    pub status: VideoStatus,
+    pub thumbnail_url: Option<Url>,
+    pub title: String,
+    pub url: Url,
+
+    /// The seconds the user has already watched the video
+    pub watch_progress: Duration,
+}
+
+/// The priority of a [`Video`].
+#[derive(Debug, Default, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
+pub struct Priority {
+    value: i64,
+}
+impl Priority {
+    /// Return the underlying value to insert that into the database
+    #[must_use]
+    pub fn as_db_integer(&self) -> i64 {
+        self.value
+    }
+}
+impl From<i64> for Priority {
+    fn from(value: i64) -> Self {
+        Self { value }
+    }
+}
+impl Display for Priority {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        self.value.fmt(f)
+    }
+}
+
+/// A UNIX time stamp.
+#[derive(Debug, Default, Clone, Copy)]
+pub struct TimeStamp {
+    value: i64,
+}
+impl TimeStamp {
+    /// Return the seconds since the UNIX epoch for this [`TimeStamp`].
+    #[must_use]
+    pub fn as_secs(&self) -> i64 {
+        self.value
+    }
+
+    /// Construct a [`TimeStamp`] from a count of seconds since the UNIX epoch.
+    #[must_use]
+    pub fn from_secs(value: i64) -> Self {
+        Self { value }
+    }
+
+    /// Construct a [`TimeStamp`] from the current time.
+    #[must_use]
+    pub fn from_now() -> Self {
+        Self {
+            value: Utc::now().timestamp(),
+        }
+    }
+}
+impl Display for TimeStamp {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        DateTime::from_timestamp(self.value, 0)
+            .expect("The timestamps should always be valid")
+            .format("%Y-%m-%d")
+            .fmt(f)
+    }
+}
+
+#[derive(Debug)]
+pub struct VideoOptions {
+    pub yt_dlp: YtDlpOptions,
+    pub mpv: MpvOptions,
+}
+impl VideoOptions {
+    pub(crate) fn new(subtitle_langs: String, playback_speed: f64) -> Self {
+        let yt_dlp = YtDlpOptions { subtitle_langs };
+        let mpv = MpvOptions { playback_speed };
+        Self { yt_dlp, mpv }
+    }
+
+    /// This will write out the options that are different from the defaults.
+    /// Beware that this does not set the priority.
+    #[must_use]
+    pub fn to_cli_flags(self, app: &App) -> String {
+        let mut f = String::new();
+
+        if (self.mpv.playback_speed - app.config.select.playback_speed).abs() > f64::EPSILON {
+            write!(f, " --speed '{}'", self.mpv.playback_speed).expect("Works");
+        }
+        if self.yt_dlp.subtitle_langs != app.config.select.subtitle_langs {
+            write!(f, " --subtitle-langs '{}'", self.yt_dlp.subtitle_langs).expect("Works");
+        }
+
+        f.trim().to_owned()
+    }
+}
+
+#[derive(Debug, Clone, Copy)]
+/// Additional settings passed to mpv on watch
+pub struct MpvOptions {
+    /// The playback speed. (1 is 100%, 2.7 is 270%, and so on)
+    pub playback_speed: f64,
+}
+
+#[derive(Debug)]
+/// Additional configuration options passed to yt-dlp on download
+pub struct YtDlpOptions {
+    /// In the form of `lang1,lang2,lang3` (e.g. `en,de,sv`)
+    pub subtitle_langs: String,
+}
+
+/// # Video Lifetime (words in <brackets> are commands):
+///      <Pick>
+///     /    \
+/// <Watch>   <Drop> -> Dropped // yt select
+///     |
+/// Cache                       // yt cache
+///     |
+/// Watched                     // yt watch
+#[derive(Default, Debug, PartialEq, Eq, PartialOrd, Ord, Clone)]
+pub enum VideoStatus {
+    #[default]
+    Pick,
+
+    /// The video has been selected to be watched
+    Watch,
+    /// The video has been cached and is ready to be watched
+    Cached {
+        cache_path: PathBuf,
+        is_focused: bool,
+    },
+    /// The video has been watched
+    Watched,
+
+    /// The video has been selected to be dropped
+    Drop,
+    /// The video has been dropped
+    Dropped,
+}
+
+impl VideoStatus {
+    /// Reconstruct a [`VideoStatus`] from its marker and the optional parts.
+    /// This should only be used by the db record to [`Video`] code.
+    ///
+    /// # Panics
+    /// Only if internal expectations fail.
+    #[must_use]
+    pub fn from_marker(marker: VideoStatusMarker, optional: Option<(PathBuf, bool)>) -> Self {
+        match marker {
+            VideoStatusMarker::Pick => Self::Pick,
+            VideoStatusMarker::Watch => Self::Watch,
+            VideoStatusMarker::Cached => {
+                let (cache_path, is_focused) =
+                    optional.expect("This should be `Some` when the video status is cached");
+                Self::Cached {
+                    cache_path,
+                    is_focused,
+                }
+            }
+            VideoStatusMarker::Watched => Self::Watched,
+            VideoStatusMarker::Drop => Self::Drop,
+            VideoStatusMarker::Dropped => Self::Dropped,
+        }
+    }
+
+    /// Turn the [`VideoStatus`] to its internal parts. This is only really useful for the database
+    /// functions.
+    #[must_use]
+    pub fn to_parts_for_db(self) -> (VideoStatusMarker, Option<(PathBuf, bool)>) {
+        match self {
+            VideoStatus::Pick => (VideoStatusMarker::Pick, None),
+            VideoStatus::Watch => (VideoStatusMarker::Watch, None),
+            VideoStatus::Cached {
+                cache_path,
+                is_focused,
+            } => (VideoStatusMarker::Cached, Some((cache_path, is_focused))),
+            VideoStatus::Watched => (VideoStatusMarker::Watched, None),
+            VideoStatus::Drop => (VideoStatusMarker::Drop, None),
+            VideoStatus::Dropped => (VideoStatusMarker::Dropped, None),
+        }
+    }
+
+    /// Return the associated [`VideoStatusMarker`] for this [`VideoStatus`].
+    #[must_use]
+    pub fn as_marker(&self) -> VideoStatusMarker {
+        match self {
+            VideoStatus::Pick => VideoStatusMarker::Pick,
+            VideoStatus::Watch => VideoStatusMarker::Watch,
+            VideoStatus::Cached { .. } => VideoStatusMarker::Cached,
+            VideoStatus::Watched => VideoStatusMarker::Watched,
+            VideoStatus::Drop => VideoStatusMarker::Drop,
+            VideoStatus::Dropped => VideoStatusMarker::Dropped,
+        }
+    }
+}
+
+/// Unit only variant of [`VideoStatus`]
+#[derive(Default, Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Copy)]
+pub enum VideoStatusMarker {
+    #[default]
+    Pick,
+
+    /// The video has been selected to be watched
+    Watch,
+    /// The video has been cached and is ready to be watched
+    Cached,
+    /// The video has been watched
+    Watched,
+
+    /// The video has been selected to be dropped
+    Drop,
+    /// The video has been dropped
+    Dropped,
+}
+
+impl VideoStatusMarker {
+    pub const ALL: &'static [Self; 6] = &[
+        Self::Pick,
+        //
+        Self::Watch,
+        Self::Cached,
+        Self::Watched,
+        //
+        Self::Drop,
+        Self::Dropped,
+    ];
+
+    #[must_use]
+    pub fn as_command(&self) -> &str {
+        // NOTE: Keep the serializable variants synced with the main `select` function <2024-06-14>
+        // Also try to ensure that the strings have the same length
+        match self {
+            Self::Pick => "pick   ",
+
+            Self::Watch | Self::Cached => "watch  ",
+            Self::Watched => "watched",
+
+            Self::Drop | Self::Dropped => "drop   ",
+        }
+    }
+
+    #[must_use]
+    pub fn as_db_integer(&self) -> i64 {
+        // These numbers should not change their mapping!
+        // Oh, and keep them in sync with the SQLite check constraint.
+        match self {
+            Self::Pick => 0,
+
+            Self::Watch => 1,
+            Self::Cached => 2,
+            Self::Watched => 3,
+
+            Self::Drop => 4,
+            Self::Dropped => 5,
+        }
+    }
+    #[must_use]
+    pub fn from_db_integer(num: i64) -> Self {
+        match num {
+            0 => Self::Pick,
+
+            1 => Self::Watch,
+            2 => Self::Cached,
+            3 => Self::Watched,
+
+            4 => Self::Drop,
+            5 => Self::Dropped,
+            other => unreachable!(
+                "The database returned a enum discriminator, unknown to us: '{}'",
+                other
+            ),
+        }
+    }
+
+    #[must_use]
+    pub fn as_str(&self) -> &'static str {
+        match self {
+            Self::Pick => "Pick",
+
+            Self::Watch => "Watch",
+            Self::Cached => "Cache",
+            Self::Watched => "Watched",
+
+            Self::Drop => "Drop",
+            Self::Dropped => "Dropped",
+        }
+    }
+}
diff --git a/crates/yt/src/storage/video_database/notify.rs b/crates/yt/src/storage/video_database/notify.rs
new file mode 100644
index 0000000..b55c00a
--- /dev/null
+++ b/crates/yt/src/storage/video_database/notify.rs
@@ -0,0 +1,77 @@
+// yt - A fully featured command line YouTube client
+//
+// Copyright (C) 2025 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+// SPDX-License-Identifier: GPL-3.0-or-later
+//
+// This file is part of Yt.
+//
+// You should have received a copy of the License along with this program.
+// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
+
+use std::{
+    path::{Path, PathBuf},
+    sync::mpsc,
+    thread::sleep,
+    time::Duration,
+};
+
+use crate::app::App;
+
+use anyhow::{Context, Result};
+use notify::{
+    Event, EventKind, RecursiveMode, Watcher,
+    event::{DataChange, ModifyKind},
+};
+use tokio::task;
+
+/// This function registers a watcher for the database and only returns once a write
+/// to it was registered.
+pub async fn wait_for_db_write(app: &App) -> Result<()> {
+    let db_path: PathBuf = app.config.paths.database_path.clone();
+    task::spawn_blocking(move || wait_for_db_write_sync(&db_path)).await?
+}
+
+fn wait_for_db_write_sync(db_path: &Path) -> Result<()> {
+    let (tx, rx) = mpsc::channel::<notify::Result<Event>>();
+
+    let mut watcher = notify::recommended_watcher(tx)?;
+
+    watcher.watch(db_path, RecursiveMode::NonRecursive)?;
+
+    for res in rx {
+        let event = res.context("Failed to wait for db write")?;
+
+        if let EventKind::Modify(ModifyKind::Data(DataChange::Any)) = event.kind {
+            // Buffer some of the `Modify` event burst.
+            sleep(Duration::from_millis(10));
+
+            return Ok(());
+        }
+    }
+
+    Ok(())
+}
+
+/// This function registers a watcher for the cache path and returns once a file was removed
+pub async fn wait_for_cache_reduction(app: &App) -> Result<()> {
+    let download_directory: PathBuf = app.config.paths.download_dir.clone();
+    task::spawn_blocking(move || wait_for_cache_reduction_sync(&download_directory)).await?
+}
+
+fn wait_for_cache_reduction_sync(download_directory: &Path) -> Result<()> {
+    let (tx, rx) = mpsc::channel::<notify::Result<Event>>();
+
+    let mut watcher = notify::recommended_watcher(tx)?;
+
+    watcher.watch(download_directory, RecursiveMode::Recursive)?;
+
+    for res in rx {
+        let event = res.context("Failed to wait for cache size reduction")?;
+
+        if let EventKind::Remove(_) = event.kind {
+            return Ok(());
+        }
+    }
+
+    Ok(())
+}
diff --git a/crates/yt/src/storage/video_database/set/mod.rs b/crates/yt/src/storage/video_database/set/mod.rs
new file mode 100644
index 0000000..1b19011
--- /dev/null
+++ b/crates/yt/src/storage/video_database/set/mod.rs
@@ -0,0 +1,327 @@
+// yt - A fully featured command line YouTube client
+//
+// Copyright (C) 2024 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+// Copyright (C) 2025 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+// SPDX-License-Identifier: GPL-3.0-or-later
+//
+// This file is part of Yt.
+//
+// You should have received a copy of the License along with this program.
+// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
+
+//! These functions change the database. They are added on a demand basis.
+
+use std::path::{Path, PathBuf};
+
+use anyhow::{Context, Result};
+use chrono::Utc;
+use log::{debug, info};
+use sqlx::query;
+use tokio::fs;
+
+use crate::{app::App, storage::video_database::extractor_hash::ExtractorHash, video_from_record};
+
+use super::{Priority, Video, VideoOptions, VideoStatus};
+
+mod playlist;
+pub use playlist::*;
+
+const fn is_focused_to_value(is_focused: bool) -> Option<i8> {
+    if is_focused { Some(1) } else { None }
+}
+
+/// Set a new status for a video.
+/// This only updates the status timestamp/priority when the status or the priority has changed.
+pub async fn video_status(
+    app: &App,
+    video_hash: &ExtractorHash,
+    new_status: VideoStatus,
+    new_priority: Option<Priority>,
+) -> Result<()> {
+    let video_hash = video_hash.hash().to_string();
+
+    let old = {
+        let base = query!(
+            r#"
+    SELECT *
+    FROM videos
+    WHERE extractor_hash = ?
+    "#,
+            video_hash
+        )
+        .fetch_one(&app.database)
+        .await?;
+
+        video_from_record! {base}
+    };
+
+    let old_marker = old.status.as_marker();
+    let (cache_path, is_focused) = {
+        fn cache_path_to_string(path: &Path) -> Result<String> {
+            Ok(path
+                .to_str()
+                .with_context(|| {
+                    format!(
+                        "Failed to parse cache path ('{}') as utf8 string",
+                        path.display()
+                    )
+                })?
+                .to_owned())
+        }
+
+        if let VideoStatus::Cached {
+            cache_path,
+            is_focused,
+        } = &new_status
+        {
+            (
+                Some(cache_path_to_string(cache_path)?),
+                is_focused_to_value(*is_focused),
+            )
+        } else {
+            (None, None)
+        }
+    };
+
+    let new_status = new_status.as_marker();
+
+    if let Some(new_priority) = new_priority {
+        if old_marker == new_status && old.priority == new_priority {
+            return Ok(());
+        }
+
+        let now = Utc::now().timestamp();
+
+        debug!("Running status change: {old_marker:#?} -> {new_status:#?}...",);
+
+        let new_status = new_status.as_db_integer();
+        let new_priority = new_priority.as_db_integer();
+        query!(
+            r#"
+        UPDATE videos
+        SET status = ?, last_status_change = ?, priority = ?, cache_path = ?, is_focused = ?
+        WHERE extractor_hash = ?;
+        "#,
+            new_status,
+            now,
+            new_priority,
+            cache_path,
+            is_focused,
+            video_hash
+        )
+        .execute(&app.database)
+        .await?;
+    } else {
+        if old_marker == new_status {
+            return Ok(());
+        }
+
+        let now = Utc::now().timestamp();
+
+        debug!("Running status change: {old_marker:#?} -> {new_status:#?}...",);
+
+        let new_status = new_status.as_db_integer();
+        query!(
+            r#"
+        UPDATE videos
+        SET status = ?, last_status_change = ?, cache_path = ?, is_focused = ?
+        WHERE extractor_hash = ?;
+        "#,
+            new_status,
+            now,
+            cache_path,
+            is_focused,
+            video_hash
+        )
+        .execute(&app.database)
+        .await?;
+    }
+
+    debug!("Finished status change.");
+    Ok(())
+}
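+
+// Illustrative sketch (not part of this module): a typical call site, where `video` is a
+// hypothetical `Video` value, marks it to be watched with the default priority:
+//
+//     video_status(&app, &video.extractor_hash, VideoStatus::Watch, Some(Priority::default()))
+//         .await?;
+//
+// Passing `None` as the priority leaves the stored priority untouched.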
+
+/// Mark a video as watched.
+/// This will both set the status to `Watched` and set the `cache_path` to NULL.
+///
+/// # Panics
+/// Only if assertions fail.
+pub async fn video_watched(app: &App, video: &ExtractorHash) -> Result<()> {
+    let old = {
+        let video_hash = video.hash().to_string();
+
+        let base = query!(
+            r#"
+    SELECT *
+    FROM videos
+    WHERE extractor_hash = ?
+    "#,
+            video_hash
+        )
+        .fetch_one(&app.database)
+        .await?;
+
+        video_from_record! {base}
+    };
+
+    info!("Will set video watched: '{}'", old.title);
+
+    if let VideoStatus::Cached { cache_path, .. } = &old.status {
+        if let Ok(true) = cache_path.try_exists() {
+            fs::remove_file(cache_path).await?;
+        }
+    } else {
+        unreachable!("The video must be marked as Cached before it can be marked Watched");
+    }
+
+    video_status(app, video, VideoStatus::Watched, None).await?;
+
+    Ok(())
+}
+
+pub(crate) async fn video_watch_progress(
+    app: &App,
+    extractor_hash: &ExtractorHash,
+    watch_progress: u32,
+) -> std::result::Result<(), anyhow::Error> {
+    let video_extractor_hash = extractor_hash.hash().to_string();
+
+    query!(
+        r#"
+            UPDATE videos
+            SET watch_progress = ?
+            WHERE extractor_hash = ?;
+        "#,
+        watch_progress,
+        video_extractor_hash,
+    )
+    .execute(&app.database)
+    .await?;
+
+    Ok(())
+}
+
+pub async fn set_video_options(
+    app: &App,
+    hash: &ExtractorHash,
+    video_options: &VideoOptions,
+) -> Result<()> {
+    let video_extractor_hash = hash.hash().to_string();
+    let playback_speed = video_options.mpv.playback_speed;
+    let subtitle_langs = &video_options.yt_dlp.subtitle_langs;
+
+    query!(
+        r#"
+            UPDATE video_options
+            SET playback_speed = ?, subtitle_langs = ?
+            WHERE extractor_hash = ?;
+        "#,
+        playback_speed,
+        subtitle_langs,
+        video_extractor_hash,
+    )
+    .execute(&app.database)
+    .await?;
+
+    Ok(())
+}
+
+/// # Panics
+/// Only if internal expectations fail.
+pub async fn add_video(app: &App, video: Video) -> Result<()> {
+    let parent_subscription_name = video.parent_subscription_name;
+
+    let thumbnail_url = video.thumbnail_url.map(|val| val.to_string());
+
+    let url = video.url.to_string();
+    let extractor_hash = video.extractor_hash.hash().to_string();
+
+    let default_subtitle_langs = &app.config.select.subtitle_langs;
+    let default_mpv_playback_speed = app.config.select.playback_speed;
+
+    let status = video.status.as_marker().as_db_integer();
+    let (cache_path, is_focused) = if let VideoStatus::Cached {
+        cache_path,
+        is_focused,
+    } = video.status
+    {
+        (
+            Some(
+                cache_path
+                    .to_str()
+                    .with_context(|| {
+                        format!(
+                            "Failed to prase cache path '{}' as utf-8 string",
+                            cache_path.display()
+                        )
+                    })?
+                    .to_string(),
+            ),
+            is_focused_to_value(is_focused),
+        )
+    } else {
+        (None, None)
+    };
+
+    let duration: Option<f64> = video.duration.as_secs_f64();
+    let last_status_change: i64 = video.last_status_change.as_secs();
+    let publish_date: Option<i64> = video.publish_date.map(|pd| pd.as_secs());
+    let watch_progress: i64 =
+        i64::try_from(video.watch_progress.as_secs()).expect("The watch progress in seconds should always fit into an i64");
+
+    let mut tx = app.database.begin().await?;
+    query!(
+        r#"
+        INSERT INTO videos (
+            description,
+            duration,
+            extractor_hash,
+            is_focused,
+            last_status_change,
+            parent_subscription_name,
+            publish_date,
+            status,
+            thumbnail_url,
+            title,
+            url,
+            watch_progress,
+            cache_path
+            )
+        VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?);
+    "#,
+        video.description,
+        duration,
+        extractor_hash,
+        is_focused,
+        last_status_change,
+        parent_subscription_name,
+        publish_date,
+        status,
+        thumbnail_url,
+        video.title,
+        url,
+        watch_progress,
+        cache_path,
+    )
+    .execute(&mut *tx)
+    .await?;
+
+    query!(
+        r#"
+        INSERT INTO video_options (
+            extractor_hash,
+            subtitle_langs,
+            playback_speed)
+        VALUES (?, ?, ?);
+    "#,
+        extractor_hash,
+        default_subtitle_langs,
+        default_mpv_playback_speed
+    )
+    .execute(&mut *tx)
+    .await?;
+
+    tx.commit().await?;
+
+    Ok(())
+}
diff --git a/crates/yt/src/storage/video_database/set/playlist.rs b/crates/yt/src/storage/video_database/set/playlist.rs
new file mode 100644
index 0000000..547df21
--- /dev/null
+++ b/crates/yt/src/storage/video_database/set/playlist.rs
@@ -0,0 +1,101 @@
+// yt - A fully featured command line YouTube client
+//
+// Copyright (C) 2025 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+// SPDX-License-Identifier: GPL-3.0-or-later
+//
+// This file is part of Yt.
+//
+// You should have received a copy of the License along with this program.
+// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
+
+use anyhow::Result;
+use log::debug;
+use sqlx::query;
+
+use crate::{
+    app::App,
+    storage::video_database::{extractor_hash::ExtractorHash, get},
+};
+
+/// Set a video to be focused.
+/// This optionally takes another video hash, which marks the old focused video.
+/// This will then be disabled.
+///
+/// # Panics
+/// Only if internal assertions fail.
+pub async fn focused(
+    app: &App,
+    new_video_hash: &ExtractorHash,
+    old_video_hash: Option<&ExtractorHash>,
+) -> Result<()> {
+    unfocused(app, old_video_hash).await?;
+
+    debug!("Focusing video: '{new_video_hash}'");
+    let new_hash = new_video_hash.hash().to_string();
+    query!(
+        r#"
+            UPDATE videos
+            SET is_focused = 1
+            WHERE extractor_hash = ?;
+        "#,
+        new_hash,
+    )
+    .execute(&app.database)
+    .await?;
+
+    assert_eq!(
+        *new_video_hash,
+        get::currently_focused_video(app)
+            .await?
+            .expect("This is some at this point")
+            .extractor_hash
+    );
+    Ok(())
+}
+
+/// Set a video to be no longer focused.
+/// This will use the supplied `video_hash` if it is [`Some`], otherwise it will simply un-focus
+/// the currently focused video.
+///
+/// # Panics
+/// Only if internal assertions fail.
+pub async fn unfocused(app: &App, video_hash: Option<&ExtractorHash>) -> Result<()> {
+    let hash = if let Some(hash) = video_hash {
+        hash.hash().to_string()
+    } else {
+        let output = query!(
+            r#"
+                SELECT extractor_hash
+                FROM videos
+                WHERE is_focused = 1;
+            "#,
+        )
+        .fetch_optional(&app.database)
+        .await?;
+
+        if let Some(output) = output {
+            output.extractor_hash
+        } else {
+            // There is no focused video right now, so there is nothing to un-focus.
+            return Ok(());
+        }
+    };
+    debug!("Unfocusing video: '{hash}'");
+
+    query!(
+        r#"
+            UPDATE videos
+            SET is_focused = NULL
+            WHERE extractor_hash = ?;
+        "#,
+        hash
+    )
+    .execute(&app.database)
+    .await?;
+
+    assert!(
+        get::currently_focused_video(app).await?.is_none(),
+        "We assumed that the video we just removed was actually a focused one."
+    );
+    Ok(())
+}
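+
+// Illustrative sketch (not part of this module): switching focus from one cached video to
+// another, where `old` and `new` are hypothetical `Video` values:
+//
+//     focused(&app, &new.extractor_hash, Some(&old.extractor_hash)).await?;
+//
+// Passing `None` as the old hash makes `focused` look up and clear the currently focused
+// video itself.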
diff --git a/crates/yt/src/subscribe/mod.rs b/crates/yt/src/subscribe/mod.rs
new file mode 100644
index 0000000..66797e8
--- /dev/null
+++ b/crates/yt/src/subscribe/mod.rs
@@ -0,0 +1,193 @@
+// yt - A fully featured command line YouTube client
+//
+// Copyright (C) 2024 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+// Copyright (C) 2025 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+// SPDX-License-Identifier: GPL-3.0-or-later
+//
+// This file is part of Yt.
+//
+// You should have received a copy of the License along with this program.
+// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
+
+use std::str::FromStr;
+
+use anyhow::{Context, Result, bail};
+use futures::FutureExt;
+use log::{error, warn};
+use tokio::io::{AsyncBufRead, AsyncBufReadExt};
+use url::Url;
+use yt_dlp::{json_cast, json_get, options::YoutubeDLOptions};
+
+use crate::{
+    app::App,
+    storage::subscriptions::{
+        Subscription, add_subscription, check_url, get, remove_all, remove_subscription,
+    },
+    unreachable::Unreachable,
+};
+
+pub async fn unsubscribe(app: &App, name: String) -> Result<()> {
+    let present_subscriptions = get(app).await?;
+
+    if let Some(subscription) = present_subscriptions.0.get(&name) {
+        remove_subscription(app, subscription).await?;
+    } else {
+        bail!("Couldn't find subscription: '{}'", &name);
+    }
+
+    Ok(())
+}
+
+pub async fn import<W: AsyncBufRead + AsyncBufReadExt + Unpin>(
+    app: &App,
+    reader: W,
+    force: bool,
+) -> Result<()> {
+    if force {
+        remove_all(app).await?;
+    }
+
+    let mut lines = reader.lines();
+    while let Some(line) = lines.next_line().await? {
+        let url =
+            Url::from_str(&line).with_context(|| format!("Failed to parse '{line}' as url"))?;
+        match subscribe(app, None, url)
+            .await
+            .with_context(|| format!("Failed to subscribe to: '{line}'"))
+        {
+            Ok(()) => (),
+            Err(err) => eprintln!(
+                "Error while subscribing to '{}': '{}'",
+                line,
+                err.source().unreachable("Should have a source")
+            ),
+        }
+    }
+
+    Ok(())
+}
+
+pub async fn subscribe(app: &App, name: Option<String>, url: Url) -> Result<()> {
+    if !(url.as_str().ends_with("videos")
+        || url.as_str().ends_with("streams")
+        || url.as_str().ends_with("shorts")
+        || url.as_str().ends_with("videos/")
+        || url.as_str().ends_with("streams/")
+        || url.as_str().ends_with("shorts/"))
+        && url.as_str().contains("youtube.com")
+    {
+        warn!(
+            "Your youtbe url does not seem like it actually tracks a channels playlist (videos, streams, shorts). Adding subscriptions for each of them..."
+        );
+
+        let url = Url::parse(&(url.as_str().to_owned() + "/"))
+            .unreachable("This was an url, it should stay one");
+
+        if let Some(name) = name {
+            let out: Result<()> = async move {
+                actual_subscribe(
+                    app,
+                    Some(name.clone() + " {Videos}"),
+                    url.join("videos/")
+                        .unreachable("The url should allow being joined onto"),
+                )
+                .await
+                .with_context(|| {
+                    format!("Failed to subscribe to '{}'", name.clone() + " {Videos}")
+                })?;
+
+                actual_subscribe(
+                    app,
+                    Some(name.clone() + " {Streams}"),
+                    url.join("streams/").unreachable("See above."),
+                )
+                .await
+                .with_context(|| {
+                    format!("Failed to subscribe to '{}'", name.clone() + " {Streams}")
+                })?;
+
+                actual_subscribe(
+                    app,
+                    Some(name.clone() + " {Shorts}"),
+                    url.join("shorts/").unreachable("See above."),
+                )
+                .await
+                .with_context(|| format!("Failed to subscribe to '{}'", name + " {Shorts}"))?;
+
+                Ok(())
+            }
+            .boxed()
+            .await;
+
+            out?;
+        } else {
+            let _ = actual_subscribe(app, None, url.join("videos/").unreachable("See above."))
+                .await
+                .map_err(|err| {
+                    error!("Failed to subscribe to the '{}' variant: {err}", "{Videos}");
+                });
+
+            let _ = actual_subscribe(app, None, url.join("streams/").unreachable("See above."))
+                .await
+                .map_err(|err| {
+                    error!(
+                        "Failed to subscribe to the '{}' variant: {err}",
+                        "{Streams}"
+                    );
+                });
+
+            let _ = actual_subscribe(app, None, url.join("shorts/").unreachable("See above."))
+                .await
+                .map_err(|err| {
+                    error!("Failed to subscribe to the '{}' variant: {err}", "{Shorts}");
+                });
+        }
+    } else {
+        actual_subscribe(app, name, url).await?;
+    }
+
+    Ok(())
+}
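+
+// As an example (the channel handle is made up): subscribing to
+// `https://www.youtube.com/@example` with the name "Example" results in the three
+// subscriptions "Example {Videos}", "Example {Streams}" and "Example {Shorts}", pointing at
+// the channel's `videos/`, `streams/` and `shorts/` tabs respectively.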
+
+async fn actual_subscribe(app: &App, name: Option<String>, url: Url) -> Result<()> {
+    if !check_url(url.clone()).await? {
+        bail!("The url ('{}') does not represent a playlist!", &url)
+    }
+
+    let name = if let Some(name) = name {
+        name
+    } else {
+        let yt_dlp = YoutubeDLOptions::new()
+            .set("playliststart", 1)
+            .set("playlistend", 10)
+            .set("noplaylist", false)
+            .set("extract_flat", "in_playlist")
+            .build()?;
+
+        let info = yt_dlp.extract_info(&url, false, false)?;
+
+        if info.get("_type").map(|v| json_cast!(v, as_str)) == Some("playlist") {
+            json_get!(info, "title", as_str).to_owned()
+        } else {
+            bail!("The url ('{}') does not represent a playlist!", &url)
+        }
+    };
+
+    let present_subscriptions = get(app).await?;
+
+    if let Some(subs) = present_subscriptions.0.get(&name) {
+        bail!(
+            "The subscription '{}' could not be added, \
+                as another one with the same name ('{}') already exists. It links to the Url: '{}'",
+            name,
+            name,
+            subs.url
+        );
+    }
+
+    let sub = Subscription { name, url };
+
+    add_subscription(app, &sub).await?;
+
+    Ok(())
+}
diff --git a/crates/yt/src/unreachable.rs b/crates/yt/src/unreachable.rs
new file mode 100644
index 0000000..436fbb6
--- /dev/null
+++ b/crates/yt/src/unreachable.rs
@@ -0,0 +1,50 @@
+// yt - A fully featured command line YouTube client
+//
+// Copyright (C) 2024 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+// Copyright (C) 2025 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+// SPDX-License-Identifier: GPL-3.0-or-later
+//
+// This file is part of Yt.
+//
+// You should have received a copy of the License along with this program.
+// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
+
+// This has been taken from: https://gitlab.torproject.org/tpo/core/arti/-/issues/950
+
+// The functions here should be annotated with `#[inline(always)]`.
+#![allow(clippy::inline_always)]
+
+use std::fmt::Debug;
+
+/// Trait for something that can possibly be unwrapped, like a `Result` or an `Option`.
+/// It conveys the semantic information that unwrapping here should never fail.
+pub trait Unreachable<T> {
+    /// Like `expect()`, but does not trigger clippy.
+    ///
+    /// # Usage
+    ///
+    /// This method only exists so that we can use it without hitting
+    /// `clippy::missing_panics_docs`.  Therefore, we should only use it
+    /// for situations where we are certain that the panic cannot occur
+    /// unless something else is very broken.  Consider instead using
+    /// `expect()` and adding a `Panics` section to your function
+    /// documentation.
+    ///
+    /// # Panics
+    ///
+    /// Panics if this is not an object that can be unwrapped, i.e. if it is a
+    /// `None` or an `Err`.
+    fn unreachable(self, msg: &str) -> T;
+}
+impl<T> Unreachable<T> for Option<T> {
+    #[inline(always)]
+    fn unreachable(self, msg: &str) -> T {
+        self.expect(msg)
+    }
+}
+impl<T, E: Debug> Unreachable<T> for Result<T, E> {
+    #[inline(always)]
+    fn unreachable(self, msg: &str) -> T {
+        self.expect(msg)
+    }
+}
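+
+// Illustrative sketch: usage mirrors `expect()`, e.g.
+//
+//     let first = [1, 2, 3].first().unreachable("The slice is known to be non-empty");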
diff --git a/crates/yt/src/update/mod.rs b/crates/yt/src/update/mod.rs
new file mode 100644
index 0000000..7f9bee7
--- /dev/null
+++ b/crates/yt/src/update/mod.rs
@@ -0,0 +1,204 @@
+// yt - A fully featured command line YouTube client
+//
+// Copyright (C) 2024 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+// Copyright (C) 2025 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+// SPDX-License-Identifier: GPL-3.0-or-later
+//
+// This file is part of Yt.
+//
+// You should have received a copy of the License along with this program.
+// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
+
+use std::{str::FromStr, time::Duration};
+
+use anyhow::{Context, Ok, Result};
+use chrono::{DateTime, Utc};
+use log::warn;
+use url::Url;
+use yt_dlp::{info_json::InfoJson, json_cast, json_get};
+
+use crate::{
+    app::App,
+    select::selection_file::duration::MaybeDuration,
+    storage::{
+        subscriptions::{self, Subscription},
+        video_database::{
+            Priority, TimeStamp, Video, VideoStatus, extractor_hash::ExtractorHash,
+            get::get_all_hashes, set::add_video,
+        },
+    },
+};
+
+mod updater;
+use updater::Updater;
+
+pub async fn update(
+    app: &App,
+    max_backlog: usize,
+    subscription_names_to_update: Vec<String>,
+    total_number: Option<usize>,
+    current_progress: Option<usize>,
+) -> Result<()> {
+    let subscriptions = subscriptions::get(app).await?;
+
+    let subs: Vec<Subscription> = if subscription_names_to_update.is_empty() {
+        subscriptions.0.into_values().collect()
+    } else {
+        subscriptions
+            .0
+            .into_values()
+            .filter(|sub| subscription_names_to_update.contains(&sub.name))
+            .collect()
+    };
+
+    // We can get away with not having to re-fetch the hashes every time, as the returned video
+    // should not contain duplicates.
+    let hashes = get_all_hashes(app).await?;
+
+    let updater = Updater::new(max_backlog, hashes);
+    updater
+        .update(app, subs, total_number, current_progress)
+        .await?;
+
+    Ok(())
+}
+
+#[allow(clippy::too_many_lines)]
+pub fn video_entry_to_video(entry: &InfoJson, sub: Option<&Subscription>) -> Result<Video> {
+    fn fmt_context(date: &str, extended: Option<&str>) -> String {
+        let f = format!(
+            "Failed to parse the `upload_date` of the entry ('{date}'). \
+                    Expected `YYYY-MM-DD`, has the format changed?"
+        );
+        if let Some(date_string) = extended {
+            format!("{f}\nThe parsed '{date_string}' can't be turned to a valid UTC date.'")
+        } else {
+            f
+        }
+    }
+
+    let publish_date = if let Some(date) = &entry.get("upload_date") {
+        let date = json_cast!(date, as_str);
+
+        let year: u32 = date
+            .chars()
+            .take(4)
+            .collect::<String>()
+            .parse()
+            .with_context(|| fmt_context(date, None))?;
+        let month: u32 = date
+            .chars()
+            .skip(4)
+            .take(2)
+            .collect::<String>()
+            .parse()
+            .with_context(|| fmt_context(date, None))?;
+        let day: u32 = date
+            .chars()
+            .skip(4 + 2)
+            .take(2)
+            .collect::<String>()
+            .parse()
+            .with_context(|| fmt_context(date, None))?;
+
+        let date_string = format!("{year:04}-{month:02}-{day:02}T00:00:00Z");
+        Some(
+            DateTime::<Utc>::from_str(&date_string)
+                .with_context(|| fmt_context(date, Some(&date_string)))?
+                .timestamp(),
+        )
+    } else {
+        warn!(
+            "The video '{}' lacks it's upload date!",
+            json_get!(entry, "title", as_str)
+        );
+        None
+    };
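+    // For reference: an `upload_date` such as "20240528" is sliced into year/month/day and
+    // rebuilt as the RFC 3339 string "2024-05-28T00:00:00Z" before being parsed into a
+    // unix timestamp.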
+
+    let thumbnail_url = match (&entry.get("thumbnails"), &entry.get("thumbnail")) {
+        (None, None) => None,
+        (None, Some(thumbnail)) => Some(Url::from_str(json_cast!(thumbnail, as_str))?),
+
+        // TODO: The algorithm is not exactly the best <2024-05-28>
+        (Some(thumbnails), None) => {
+            if let Some(thumbnail) = json_cast!(thumbnails, as_array).first() {
+                Some(Url::from_str(json_get!(
+                    json_cast!(thumbnail, as_object),
+                    "url",
+                    as_str
+                ))?)
+            } else {
+                None
+            }
+        }
+        (Some(_), Some(thumbnail)) => Some(Url::from_str(json_cast!(thumbnail, as_str))?),
+    };
+
+    let url = {
+        let smug_url: Url = json_get!(entry, "webpage_url", as_str).parse()?;
+        // TODO(@bpeetz): We should probably add this? <2025-06-14>
+        // if '#__youtubedl_smuggle' not in smug_url:
+        //     return smug_url, default
+        // url, _, sdata = smug_url.rpartition('#')
+        // jsond = urllib.parse.parse_qs(sdata)['__youtubedl_smuggle'][0]
+        // data = json.loads(jsond)
+        // return url, data
+
+        smug_url
+    };
+
+    let extractor_hash = blake3::hash(json_get!(entry, "id", as_str).as_bytes());
+
+    let subscription_name = if let Some(sub) = sub {
+        Some(sub.name.clone())
+    } else if let Some(uploader) = entry.get("uploader").map(|val| json_cast!(val, as_str)) {
+        if entry
+            .get("webpage_url_domain")
+            .map(|val| json_cast!(val, as_str))
+            == Some("youtube.com")
+        {
+            Some(format!("{uploader} - Videos"))
+        } else {
+            Some(uploader.to_owned())
+        }
+    } else {
+        None
+    };
+
+    let video = Video {
+        description: entry
+            .get("description")
+            .map(|val| json_cast!(val, as_str).to_owned()),
+        duration: MaybeDuration::from_maybe_secs_f64(
+            entry.get("duration").map(|val| json_cast!(val, as_f64)),
+        ),
+        extractor_hash: ExtractorHash::from_hash(extractor_hash),
+        last_status_change: TimeStamp::from_now(),
+        parent_subscription_name: subscription_name,
+        priority: Priority::default(),
+        publish_date: publish_date.map(TimeStamp::from_secs),
+        status: VideoStatus::Pick,
+        thumbnail_url,
+        title: json_get!(entry, "title", as_str).to_owned(),
+        url,
+        watch_progress: Duration::default(),
+    };
+    Ok(video)
+}
+
+async fn process_subscription(app: &App, sub: Subscription, entry: InfoJson) -> Result<()> {
+    let video = video_entry_to_video(&entry, Some(&sub))
+        .context("Failed to parse search entry as Video")?;
+
+    add_video(app, video.clone())
+        .await
+        .with_context(|| format!("Failed to add video to database: '{}'", video.title))?;
+    println!(
+        "{}",
+        &video
+            .to_line_display(app)
+            .await
+            .with_context(|| format!("Failed to format video: '{}'", video.title))?
+    );
+    Ok(())
+}
diff --git a/crates/yt/src/update/updater.rs b/crates/yt/src/update/updater.rs
new file mode 100644
index 0000000..75d12dc
--- /dev/null
+++ b/crates/yt/src/update/updater.rs
@@ -0,0 +1,194 @@
+// yt - A fully featured command line YouTube client
+//
+// Copyright (C) 2025 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+// SPDX-License-Identifier: GPL-3.0-or-later
+//
+// This file is part of Yt.
+//
+// You should have received a copy of the License along with this program.
+// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
+
+use std::{
+    io::{Write, stderr},
+    sync::atomic::{AtomicUsize, Ordering},
+};
+
+use anyhow::{Context, Result};
+use blake3::Hash;
+use futures::{StreamExt, future::join_all, stream};
+use log::{Level, debug, error, log_enabled};
+use serde_json::json;
+use tokio_util::task::LocalPoolHandle;
+use yt_dlp::{
+    info_json::InfoJson, json_cast, json_get, options::YoutubeDLOptions, process_ie_result,
+    python_error::PythonError,
+};
+
+use crate::{
+    ansi_escape_codes::{clear_whole_line, move_to_col},
+    app::App,
+    storage::subscriptions::Subscription,
+};
+
+use super::process_subscription;
+
+pub(super) struct Updater {
+    max_backlog: usize,
+    hashes: Vec<Hash>,
+    pool: LocalPoolHandle,
+}
+
+static REACHED_NUMBER: AtomicUsize = const { AtomicUsize::new(1) };
+
+impl Updater {
+    pub(super) fn new(max_backlog: usize, hashes: Vec<Hash>) -> Self {
+        // TODO(@bpeetz): The number should not be hardcoded. <2025-06-14>
+        let pool = LocalPoolHandle::new(16);
+
+        Self {
+            max_backlog,
+            hashes,
+            pool,
+        }
+    }
+
+    pub(super) async fn update(
+        self,
+        app: &App,
+        subscriptions: Vec<Subscription>,
+        total_number: Option<usize>,
+        current_progress: Option<usize>,
+    ) -> Result<()> {
+        let total_number = total_number.unwrap_or(subscriptions.len());
+
+        if let Some(current_progress) = current_progress {
+            REACHED_NUMBER.store(current_progress, Ordering::Relaxed);
+        }
+
+        let mut stream = stream::iter(subscriptions)
+            .map(|sub| self.get_new_entries(sub, total_number))
+            .buffer_unordered(16 * 4);
+
+        while let Some(output) = stream.next().await {
+            let mut entries = output?;
+
+            if let Some(next) = entries.next() {
+                let (sub, entry) = next;
+                process_subscription(app, sub, entry).await?;
+
+                join_all(entries.map(|(sub, entry)| process_subscription(app, sub, entry)))
+                    .await
+                    .into_iter()
+                    .collect::<Result<(), _>>()?;
+            }
+        }
+
+        Ok(())
+    }
+
+    async fn get_new_entries(
+        &self,
+        sub: Subscription,
+        total_number: usize,
+    ) -> Result<impl Iterator<Item = (Subscription, InfoJson)>> {
+        let max_backlog = self.max_backlog;
+        let hashes = self.hashes.clone();
+
+        let yt_dlp = YoutubeDLOptions::new()
+            .set("playliststart", 1)
+            .set("playlistend", max_backlog)
+            .set("noplaylist", false)
+            .set(
+                "extractor_args",
+                json! {{"youtubetab": {"approximate_date": [""]}}},
+            )
+            // TODO: This also removes unlisted and other stuff. Find a good way to remove the
+            // members-only videos from the feed. <2025-04-17>
+            .set("match-filter", "availability=public")
+            .build()?;
+
+        self.pool
+            .spawn_pinned(move || {
+                async move {
+                    if !log_enabled!(Level::Debug) {
+                        clear_whole_line();
+                        move_to_col(1);
+                        eprint!(
+                            "({}/{total_number}) Checking playlist {}...",
+                            REACHED_NUMBER.fetch_add(1, Ordering::Relaxed),
+                            sub.name
+                        );
+                        move_to_col(1);
+                        stderr().flush()?;
+                    }
+
+                    let info = yt_dlp
+                        .extract_info(&sub.url, false, false)
+                        .with_context(|| format!("Failed to get playlist '{}'.", sub.name))?;
+
+                    let empty = vec![];
+                    let entries = info
+                        .get("entries")
+                        .map_or(&empty, |val| json_cast!(val, as_array));
+
+                    let valid_entries: Vec<(Subscription, InfoJson)> = entries
+                        .iter()
+                        .take(max_backlog)
+                        .filter_map(|entry| -> Option<(Subscription, InfoJson)> {
+                            let id = json_get!(entry, "id", as_str);
+                            let extractor_hash = blake3::hash(id.as_bytes());
+
+                            if hashes.contains(&extractor_hash) {
+                                debug!(
+                                    "Skipping entry, as it is already present: '{extractor_hash}'",
+                                );
+                                None
+                            } else {
+                                Some((sub.clone(), json_cast!(entry, as_object).to_owned()))
+                            }
+                        })
+                        .collect();
+
+                    Ok(valid_entries
+                        .into_iter()
+                        .map(|(sub, entry)| {
+                            let inner_yt_dlp = YoutubeDLOptions::new()
+                                .set("noplaylist", true)
+                                .build()
+                                .expect("Worked before, should work now");
+
+                            match inner_yt_dlp.process_ie_result(entry, false) {
+                                Ok(output) => Ok((sub, output)),
+                                Err(err) => Err(err),
+                            }
+                        })
+                        // Don't fail the whole update, if one of the entries fails to fetch.
+                        .filter_map(move |base| match base {
+                            Ok(ok) => Some(ok),
+                            Err(err) => {
+                                match err {
+                                    process_ie_result::Error::Python(PythonError(err)) => {
+                                        if err.contains( "Join this channel to get access to members-only content ",) {
+                                            // Hide this error
+                                        } else {
+                                            // Show the error, but don't fail.
+                                            let error = err
+                                                .strip_prefix("DownloadError: \u{1b}[0;31mERROR:\u{1b}[0m ")
+                                                .unwrap_or(&err);
+                                            error!("While fetching {:#?}: {error}", sub.name);
+                                        }
+
+                                        None
+                                    }
+                                    process_ie_result::Error::InfoJsonPrepare(error) => {
+                                        error!("While fetching {:#?}: Failed to prepare info json: {error}", sub.name);
+                                        None
+                                    },
+                                }
+                            }
+                        }))
+                }
+            })
+            .await?
+    }
+}
diff --git a/crates/yt/src/version/mod.rs b/crates/yt/src/version/mod.rs
new file mode 100644
index 0000000..2cc41c7
--- /dev/null
+++ b/crates/yt/src/version/mod.rs
@@ -0,0 +1,52 @@
+// yt - A fully featured command line YouTube client
+//
+// Copyright (C) 2025 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+// SPDX-License-Identifier: GPL-3.0-or-later
+//
+// This file is part of Yt.
+//
+// You should have received a copy of the License along with this program.
+// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
+
+use anyhow::{Context, Result};
+use sqlx::{SqlitePool, sqlite::SqliteConnectOptions};
+use yt_dlp::options::YoutubeDLOptions;
+
+use crate::{config::Config, storage::migrate::get_version_db};
+
+pub async fn show(config: &Config) -> Result<()> {
+    let db_version = {
+        let options = SqliteConnectOptions::new()
+            .filename(&config.paths.database_path)
+            .optimize_on_close(true, None)
+            .create_if_missing(true);
+
+        let pool = SqlitePool::connect_with(options)
+            .await
+            .context("Failed to connect to database!")?;
+
+        get_version_db(&pool)
+            .await
+            .context("Failed to determine database version")?
+    };
+
+    let (yt_dlp, python) = {
+        let yt_dlp = YoutubeDLOptions::new().build()?;
+        yt_dlp.version()
+    };
+
+    let python = python.replace('\n', " ");
+
+    println!(
+        "{}: {}
+
+db version: {db_version}
+
+yt-dlp: {yt_dlp}
+python: {python}",
+        env!("CARGO_PKG_NAME"),
+        env!("CARGO_PKG_VERSION"),
+    );
+
+    Ok(())
+}
diff --git a/crates/yt/src/videos/display/format_video.rs b/crates/yt/src/videos/display/format_video.rs
new file mode 100644
index 0000000..b97acb1
--- /dev/null
+++ b/crates/yt/src/videos/display/format_video.rs
@@ -0,0 +1,94 @@
+// yt - A fully featured command line YouTube client
+//
+// Copyright (C) 2024 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+// Copyright (C) 2025 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+// SPDX-License-Identifier: GPL-3.0-or-later
+//
+// This file is part of Yt.
+//
+// You should have received a copy of the License along with this program.
+// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
+
+use anyhow::Result;
+
+use crate::{app::App, comments::output::format_text, storage::video_database::Video};
+
+impl Video {
+    pub async fn to_info_display(&self, app: &App) -> Result<String> {
+        let cache_path = self.cache_path_fmt(app);
+        let description = self.description_fmt();
+        let duration = self.duration_fmt(app);
+        let extractor_hash = self.extractor_hash_fmt(app).await?;
+        let in_playlist = self.in_playlist_fmt(app);
+        let last_status_change = self.last_status_change_fmt(app);
+        let parent_subscription_name = self.parent_subscription_name_fmt(app);
+        let priority = self.priority_fmt();
+        let publish_date = self.publish_date_fmt(app);
+        let status = self.status_fmt(app);
+        let thumbnail_url = self.thumbnail_url_fmt();
+        let title = self.title_fmt(app);
+        let url = self.url_fmt(app);
+        let watch_progress = self.watch_progress_fmt(app);
+        let video_options = self.video_options_fmt(app).await?;
+
+        let watched_percentage_fmt = {
+            if let Some(duration) = self.duration.as_secs() {
+                format!(
+                    " (watched: {:0.0}%)",
+                    (self.watch_progress.as_secs() * 100) / duration
+                )
+            } else {
+                format!(" {watch_progress}")
+            }
+        };
+
+        let string = format!(
+            "\
+{title} ({extractor_hash})
+| -> {cache_path}
+| -> {duration}{watched_percentage_fmt}
+| -> {parent_subscription_name}
+| -> priority: {priority}
+| -> {publish_date}
+| -> status: {status} since {last_status_change} ({in_playlist})
+| -> {thumbnail_url}
+| -> {url}
+| -> options: {}
+{}\n",
+            video_options.to_string().trim(),
+            format_text(description.to_string().as_str())
+        );
+        Ok(string)
+    }
+
+    pub async fn to_line_display(&self, app: &App) -> Result<String> {
+        let f = format!(
+            "{} {} {} {} {} {}",
+            self.status_fmt(app),
+            self.extractor_hash_fmt(app).await?,
+            self.title_fmt(app),
+            self.publish_date_fmt(app),
+            self.parent_subscription_name_fmt(app),
+            self.duration_fmt(app)
+        );
+
+        Ok(f)
+    }
+
+    pub async fn to_select_file_display(&self, app: &App) -> Result<String> {
+        let f = format!(
+            r#"{}{} {} "{}" "{}" "{}" "{}" "{}"{}"#,
+            self.status_fmt_no_color(),
+            self.video_options_fmt_no_color(app).await?,
+            self.extractor_hash_fmt_no_color(app).await?,
+            self.title_fmt_no_color(),
+            self.publish_date_fmt_no_color(),
+            self.parent_subscription_name_fmt_no_color(),
+            self.duration_fmt_no_color(),
+            self.url_fmt_no_color(),
+            '\n'
+        );
+
+        Ok(f)
+    }
+}
diff --git a/crates/yt/src/videos/display/mod.rs b/crates/yt/src/videos/display/mod.rs
new file mode 100644
index 0000000..1188569
--- /dev/null
+++ b/crates/yt/src/videos/display/mod.rs
@@ -0,0 +1,229 @@
+// yt - A fully featured command line YouTube client
+//
+// Copyright (C) 2024 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+// Copyright (C) 2025 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+// SPDX-License-Identifier: GPL-3.0-or-later
+//
+// This file is part of Yt.
+//
+// You should have received a copy of the License along with this program.
+// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
+
+use owo_colors::OwoColorize;
+use url::Url;
+
+use crate::{
+    app::App,
+    select::selection_file::duration::MaybeDuration,
+    storage::video_database::{TimeStamp, Video, VideoStatus, get::get_video_opts},
+};
+
+use anyhow::{Context, Result};
+
+pub mod format_video;
+
+macro_rules! get {
+    ($value:expr, $key:ident, $name:expr, $code:tt) => {
+        if let Some(value) = &$value.$key {
+            $code(value)
+        } else {
+            concat!("[No ", $name, "]").to_owned()
+        }
+    };
+}
+
+fn maybe_add_color<F>(app: &App, input: String, mut color_fn: F) -> String
+where
+    F: FnMut(String) -> String,
+{
+    if app.config.global.display_colors {
+        color_fn(input)
+    } else {
+        input
+    }
+}
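+
+// For reference: `get!(self, description, "Description", (|value: &str| value.to_owned()))`
+// either maps the `Some` value through the given closure or yields the literal
+// "[No Description]" placeholder.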
+impl Video {
+    #[must_use]
+    pub fn cache_path_fmt(&self, app: &App) -> String {
+        let cache_path = if let VideoStatus::Cached {
+            cache_path,
+            is_focused: _,
+        } = &self.status
+        {
+            cache_path.to_string_lossy().to_string()
+        } else {
+            "[No Cache Path]".to_owned()
+        };
+        maybe_add_color(app, cache_path, |v| v.blue().bold().to_string())
+    }
+
+    #[must_use]
+    pub fn description_fmt(&self) -> String {
+        get!(
+            self,
+            description,
+            "Description",
+            (|value: &str| value.to_owned())
+        )
+    }
+
+    #[must_use]
+    pub fn duration_fmt_no_color(&self) -> String {
+        self.duration.to_string()
+    }
+    #[must_use]
+    pub fn duration_fmt(&self, app: &App) -> String {
+        let duration = self.duration_fmt_no_color();
+        maybe_add_color(app, duration, |v| v.cyan().bold().to_string())
+    }
+
+    #[must_use]
+    pub fn watch_progress_fmt(&self, app: &App) -> String {
+        maybe_add_color(
+            app,
+            MaybeDuration::from_std(self.watch_progress).to_string(),
+            |v| v.cyan().bold().to_string(),
+        )
+    }
+
+    pub async fn extractor_hash_fmt_no_color(&self, app: &App) -> Result<String> {
+        let hash = self
+            .extractor_hash
+            .into_short_hash(app)
+            .await
+            .with_context(|| {
+                format!(
+                    "Failed to format extractor hash, whilst formatting video: '{}'",
+                    self.title
+                )
+            })?
+            .to_string();
+        Ok(hash)
+    }
+    pub async fn extractor_hash_fmt(&self, app: &App) -> Result<String> {
+        let hash = self.extractor_hash_fmt_no_color(app).await?;
+        Ok(maybe_add_color(app, hash, |v| {
+            v.bright_purple().italic().to_string()
+        }))
+    }
+
+    #[must_use]
+    pub fn in_playlist_fmt(&self, app: &App) -> String {
+        let output = match &self.status {
+            VideoStatus::Pick
+            | VideoStatus::Watch
+            | VideoStatus::Watched
+            | VideoStatus::Drop
+            | VideoStatus::Dropped => "Not in the playlist",
+            VideoStatus::Cached { is_focused, .. } => {
+                if *is_focused {
+                    "In the playlist and focused"
+                } else {
+                    "In the playlist"
+                }
+            }
+        };
+        maybe_add_color(app, output.to_owned(), |v| v.yellow().italic().to_string())
+    }
+    #[must_use]
+    pub fn last_status_change_fmt(&self, app: &App) -> String {
+        maybe_add_color(app, self.last_status_change.to_string(), |v| {
+            v.bright_cyan().to_string()
+        })
+    }
+
+    #[must_use]
+    pub fn parent_subscription_name_fmt_no_color(&self) -> String {
+        get!(
+            self,
+            parent_subscription_name,
+            "author",
+            (|sub: &str| sub.replace('"', "'"))
+        )
+    }
+    #[must_use]
+    pub fn parent_subscription_name_fmt(&self, app: &App) -> String {
+        let psn = self.parent_subscription_name_fmt_no_color();
+        maybe_add_color(app, psn, |v| v.bright_magenta().to_string())
+    }
+
+    #[must_use]
+    pub fn priority_fmt(&self) -> String {
+        self.priority.to_string()
+    }
+
+    #[must_use]
+    pub fn publish_date_fmt_no_color(&self) -> String {
+        get!(
+            self,
+            publish_date,
+            "release date",
+            (|date: &TimeStamp| date.to_string())
+        )
+    }
+    #[must_use]
+    pub fn publish_date_fmt(&self, app: &App) -> String {
+        let date = self.publish_date_fmt_no_color();
+        maybe_add_color(app, date, |v| v.bright_white().bold().to_string())
+    }
+
+    #[must_use]
+    pub fn status_fmt_no_color(&self) -> String {
+        // TODO: We might support `.trim()`ing that, as the extra whitespace could be bad in the
+        // selection file. <2024-10-07>
+        self.status.as_marker().as_command().to_string()
+    }
+    #[must_use]
+    pub fn status_fmt(&self, app: &App) -> String {
+        let status = self.status_fmt_no_color();
+        maybe_add_color(app, status, |v| v.red().bold().to_string())
+    }
+
+    #[must_use]
+    pub fn thumbnail_url_fmt(&self) -> String {
+        get!(
+            self,
+            thumbnail_url,
+            "thumbnail URL",
+            (|url: &Url| url.to_string())
+        )
+    }
+
+    #[must_use]
+    pub fn title_fmt_no_color(&self) -> String {
+        self.title.replace(['"', '„', '”', '“'], "'")
+    }
+    #[must_use]
+    pub fn title_fmt(&self, app: &App) -> String {
+        let title = self.title_fmt_no_color();
+        maybe_add_color(app, title, |v| v.green().bold().to_string())
+    }
+
+    #[must_use]
+    pub fn url_fmt_no_color(&self) -> String {
+        self.url.as_str().replace('"', "\\\"")
+    }
+    #[must_use]
+    pub fn url_fmt(&self, app: &App) -> String {
+        let url = self.url_fmt_no_color();
+        maybe_add_color(app, url, |v| v.italic().to_string())
+    }
+
+    pub async fn video_options_fmt_no_color(&self, app: &App) -> Result<String> {
+        let video_options = {
+            let opts = get_video_opts(app, &self.extractor_hash)
+                .await
+                .with_context(|| {
+                    format!("Failed to get video options for video: '{}'", self.title)
+                })?
+                .to_cli_flags(app);
+            let opts_white = if opts.is_empty() { "" } else { " " };
+            format!("{opts_white}{opts}")
+        };
+        Ok(video_options)
+    }
+    pub async fn video_options_fmt(&self, app: &App) -> Result<String> {
+        let opts = self.video_options_fmt_no_color(app).await?;
+        Ok(maybe_add_color(app, opts, |v| v.bright_green().to_string()))
+    }
+}
diff --git a/crates/yt/src/videos/mod.rs b/crates/yt/src/videos/mod.rs
new file mode 100644
index 0000000..960340b
--- /dev/null
+++ b/crates/yt/src/videos/mod.rs
@@ -0,0 +1,54 @@
+// yt - A fully featured command line YouTube client
+//
+// Copyright (C) 2024 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+// Copyright (C) 2025 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+// SPDX-License-Identifier: GPL-3.0-or-later
+//
+// This file is part of Yt.
+//
+// You should have received a copy of the License along with this program.
+// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
+
+use anyhow::Result;
+use futures::{TryStreamExt, stream::FuturesUnordered};
+
+pub mod display;
+
+use crate::{
+    app::App,
+    storage::video_database::{Video, VideoStatusMarker, get},
+};
+
+async fn to_line_display_owned(video: Video, app: &App) -> Result<String> {
+    video.to_line_display(app).await
+}
+
+pub async fn query(app: &App, limit: Option<usize>, search_query: Option<String>) -> Result<()> {
+    let all_videos = get::videos(app, VideoStatusMarker::ALL).await?;
+
+    // Turn one video into a line display first, to pre-warm the hash-shrinking cache.
+    if let Some(val) = all_videos.first() {
+        val.to_line_display(app).await?;
+    }
+
+    let limit = limit.unwrap_or(all_videos.len());
+
+    let all_video_strings: Vec<String> = all_videos
+        .into_iter()
+        .take(limit)
+        .map(|vid| to_line_display_owned(vid, app))
+        .collect::<FuturesUnordered<_>>()
+        .try_collect::<Vec<String>>()
+        .await?;
+
+    if let Some(query) = search_query {
+        all_video_strings
+            .into_iter()
+            .filter(|video| video.to_lowercase().contains(&query.to_lowercase()))
+            .for_each(|video| println!("{video}"));
+    } else {
+        println!("{}", all_video_strings.join("\n"));
+    }
+
+    Ok(())
+}
diff --git a/crates/yt/src/watch/mod.rs b/crates/yt/src/watch/mod.rs
new file mode 100644
index 0000000..c32a76f
--- /dev/null
+++ b/crates/yt/src/watch/mod.rs
@@ -0,0 +1,178 @@
+// yt - A fully featured command line YouTube client
+//
+// Copyright (C) 2024 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+// Copyright (C) 2025 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+// SPDX-License-Identifier: GPL-3.0-or-later
+//
+// This file is part of Yt.
+//
+// You should have received a copy of the License along with this program.
+// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
+
+use std::{
+    sync::{
+        Arc,
+        atomic::{AtomicBool, Ordering},
+    },
+    time::Duration,
+};
+
+use anyhow::{Context, Result};
+use libmpv2::{Mpv, events::EventContext};
+use log::{debug, info, trace, warn};
+use playlist_handler::{reload_mpv_playlist, save_watch_progress};
+use tokio::{task, time::sleep};
+
+use self::playlist_handler::Status;
+use crate::{
+    app::App,
+    cache::maintain,
+    storage::video_database::{get, notify::wait_for_db_write},
+};
+
+pub mod playlist;
+pub mod playlist_handler;
+
+fn init_mpv(app: &App) -> Result<(Mpv, EventContext)> {
+    // set some default values, to make things easier (these can be overridden by the config file,
+    // which we load later)
+    let mpv = Mpv::with_initializer(|mpv| {
+        // Enable default key bindings, so the user can actually interact with
+        // the player (and e.g. close the window).
+        mpv.set_property("input-default-bindings", "yes")?;
+        mpv.set_property("input-vo-keyboard", "yes")?;
+
+        // Show the on screen controller.
+        mpv.set_property("osc", "yes")?;
+
+        // Don't automatically advance to the next video (or exit the player)
+        mpv.set_option("keep-open", "always")?;
+
+        // Always display a window, even for non-video playback.
+        // As mpv does not provide CLI access here, no window means no control and no user feedback.
+        mpv.set_option("force-window", "yes")?;
+        Ok(())
+    })
+    .context("Failed to initialize mpv")?;
+
+    let config_path = &app.config.paths.mpv_config_path;
+    if config_path.try_exists()? {
+        info!("Found mpv.conf at '{}'!", config_path.display());
+        mpv.command(
+            "load-config-file",
+            &[config_path
+                .to_str()
+                .context("Failed to parse the config path is utf8-stringt")?],
+        )?;
+    } else {
+        warn!(
+            "Did not find a mpv.conf file at '{}'",
+            config_path.display()
+        );
+    }
+
+    let input_path = &app.config.paths.mpv_input_path;
+    if input_path.try_exists()? {
+        info!("Found mpv.input.conf at '{}'!", input_path.display());
+        mpv.command(
+            "load-input-conf",
+            &[input_path
+                .to_str()
+                .context("Failed to parse the input path as utf8 string")?],
+        )?;
+    } else {
+        warn!(
+            "Did not find a mpv.input.conf file at '{}'",
+            input_path.display()
+        );
+    }
+
+    let ev_ctx = EventContext::new(mpv.ctx);
+    ev_ctx.disable_deprecated_events()?;
+
+    Ok((mpv, ev_ctx))
+}
+
+pub async fn watch(app: Arc<App>) -> Result<()> {
+    maintain(&app, false).await?;
+
+    let (mpv, mut ev_ctx) = init_mpv(&app).context("Failed to initialize mpv instance")?;
+    let mpv = Arc::new(mpv);
+    reload_mpv_playlist(&app, &mpv, None, None).await?;
+
+    let should_break = Arc::new(AtomicBool::new(false));
+
+    let local_app = Arc::clone(&app);
+    let local_mpv = Arc::clone(&mpv);
+    let local_should_break = Arc::clone(&should_break);
+    let progress_handle = task::spawn(async move {
+        loop {
+            if local_should_break.load(Ordering::Relaxed) {
+                break;
+            }
+
+            if get::currently_focused_video(&local_app).await?.is_some() {
+                save_watch_progress(&local_app, &local_mpv).await?;
+            }
+
+            sleep(Duration::from_secs(30)).await;
+        }
+
+        Ok::<(), anyhow::Error>(())
+    });
+
+    let mut have_warned = (false, 0);
+    'watchloop: loop {
+        'waitloop: while let Ok(value) = playlist_handler::status(&app).await {
+            match value {
+                Status::NoMoreAvailable => {
+                    break 'watchloop;
+                }
+                Status::NoCached { marked_watch } => {
+                    // try again next time.
+                    if have_warned.0 {
+                        if have_warned.1 != marked_watch {
+                            warn!("Now {} videos are marked as to be watched.", marked_watch);
+                            have_warned.1 = marked_watch;
+                        }
+                    } else {
+                        warn!(
+                            "There is nothing to watch yet, but still {} videos marked as to be watched. \
+                        Will idle, until they become available",
+                            marked_watch
+                        );
+                        have_warned = (true, marked_watch);
+                    }
+                    wait_for_db_write(&app).await?;
+                }
+                Status::Available { newly_available } => {
+                    debug!("Check and found {newly_available} videos!");
+                    have_warned.0 = false;
+
+                    // Something just became available!
+                    break 'waitloop;
+                }
+            }
+        }
+
+        if let Some(ev) = ev_ctx.wait_event(30.) {
+            match ev {
+                Ok(event) => {
+                    trace!("Mpv event triggered: {:#?}", event);
+                    if playlist_handler::handle_mpv_event(&app, &mpv, &event)
+                        .await
+                        .with_context(|| format!("Failed to handle mpv event: '{event:#?}'"))?
+                    {
+                        break;
+                    }
+                }
+                Err(e) => debug!("Mpv Event errored: {}", e),
+            }
+        }
+    }
+
+    should_break.store(true, Ordering::Relaxed);
+    progress_handle.await??;
+
+    Ok(())
+}
diff --git a/crates/yt/src/watch/playlist.rs b/crates/yt/src/watch/playlist.rs
new file mode 100644
index 0000000..ff383d0
--- /dev/null
+++ b/crates/yt/src/watch/playlist.rs
@@ -0,0 +1,99 @@
+// yt - A fully featured command line YouTube client
+//
+// Copyright (C) 2025 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+// SPDX-License-Identifier: GPL-3.0-or-later
+//
+// This file is part of Yt.
+//
+// You should have received a copy of the License along with this program.
+// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
+
+use std::path::Path;
+
+use crate::{
+    ansi_escape_codes::{cursor_up, erase_in_display_from_cursor},
+    app::App,
+    storage::video_database::{Video, VideoStatus, get, notify::wait_for_db_write},
+};
+
+use anyhow::Result;
+use futures::{TryStreamExt, stream::FuturesOrdered};
+
+/// Extract the values of the [`VideoStatus::Cached`] value from a Video.
+fn cache_values(video: &Video) -> (&Path, bool) {
+    if let VideoStatus::Cached {
+        cache_path,
+        is_focused,
+    } = &video.status
+    {
+        (cache_path, *is_focused)
+    } else {
+        unreachable!("All of these videos should be cached");
+    }
+}
+
+/// # Panics
+/// Only if internal assertions fail.
+pub async fn playlist(app: &App, watch: bool) -> Result<()> {
+    let mut previous_output_length = 0;
+    loop {
+        let playlist = get::playlist(app).await?.to_videos();
+
+        let output = playlist
+            .into_iter()
+            .map(|video| async move {
+                let mut output = String::new();
+
+                let (_, is_focused) = cache_values(&video);
+
+                if is_focused {
+                    output.push_str("🔻 ");
+                } else {
+                    output.push_str("  ");
+                }
+
+                output.push_str(&video.title_fmt(app));
+
+                output.push_str(" (");
+                output.push_str(&video.parent_subscription_name_fmt(app));
+                output.push(')');
+
+                output.push_str(" [");
+                output.push_str(&video.duration_fmt(app));
+
+                if is_focused {
+                    output.push_str(" (");
+                    output.push_str(&if let Some(duration) = video.duration.as_secs() {
+                        // Scale before dividing, otherwise the integer division truncates to 0%.
+                        format!("{}%", (video.watch_progress.as_secs() * 100) / duration)
+                    } else {
+                        video.watch_progress_fmt(app)
+                    });
+                    output.push(')');
+                }
+                output.push(']');
+
+                output.push('\n');
+
+                Ok::<String, anyhow::Error>(output)
+            })
+            .collect::<FuturesOrdered<_>>()
+            .try_collect::<String>()
+            .await?;
+
+        // Delete the previous output
+        cursor_up(previous_output_length);
+        erase_in_display_from_cursor();
+
+        previous_output_length = output.chars().filter(|ch| *ch == '\n').count();
+
+        print!("{output}");
+
+        if !watch {
+            break;
+        }
+
+        wait_for_db_write(app).await?;
+    }
+
+    Ok(())
+}
diff --git a/crates/yt/src/watch/playlist_handler/client_messages/mod.rs b/crates/yt/src/watch/playlist_handler/client_messages/mod.rs
new file mode 100644
index 0000000..c05ca87
--- /dev/null
+++ b/crates/yt/src/watch/playlist_handler/client_messages/mod.rs
@@ -0,0 +1,99 @@
+// yt - A fully featured command line YouTube client
+//
+// Copyright (C) 2025 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+// SPDX-License-Identifier: GPL-3.0-or-later
+//
+// This file is part of Yt.
+//
+// You should have received a copy of the License along with this program.
+// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
+
+use std::{env, time::Duration};
+
+use crate::{app::App, comments};
+
+use anyhow::{Context, Result, bail};
+use libmpv2::Mpv;
+use tokio::process::Command;
+
+use super::mpv_message;
+
+async fn run_self_in_external_command(app: &App, args: &[&str]) -> Result<()> {
+    // TODO(@bpeetz): Can we trust this value? <2025-06-15>
+    let binary =
+        env::current_exe().context("Failed to determine the current executable to re-execute")?;
+
+    let status = Command::new("riverctl")
+        .args(["focus-output", "next"])
+        .status()
+        .await?;
+    if !status.success() {
+        bail!("focusing the next output failed!");
+    }
+
+    let arguments = [
+        &[
+            "--title",
+            "floating please",
+            "--command",
+            binary
+                .to_str()
+                .context("Failed to turn the executable path to a utf8-string")?,
+            "--db-path",
+            app.config
+                .paths
+                .database_path
+                .to_str()
+                .context("Failed to parse the database_path as a utf8-string")?,
+        ],
+        args,
+    ]
+    .concat();
+
+    let status = Command::new("alacritty").args(arguments).status().await?;
+    if !status.success() {
+        bail!("Falied to start `yt comments`");
+    }
+
+    let status = Command::new("riverctl")
+        .args(["focus-output", "next"])
+        .status()
+        .await?;
+
+    if !status.success() {
+        bail!("focusing the next output failed!");
+    }
+
+    Ok(())
+}
+
+pub(super) async fn handle_yt_description_external(app: &App) -> Result<()> {
+    run_self_in_external_command(app, &["description"]).await?;
+    Ok(())
+}
+pub(super) async fn handle_yt_description_local(app: &App, mpv: &Mpv) -> Result<()> {
+    let description: String = comments::description::get(app)
+        .await?
+        .chars()
+        .take(app.config.watch.local_displays_length)
+        .collect();
+
+    mpv_message(mpv, &description, Duration::from_secs(6))?;
+    Ok(())
+}
+
+pub(super) async fn handle_yt_comments_external(app: &App) -> Result<()> {
+    run_self_in_external_command(app, &["comments"]).await?;
+    Ok(())
+}
+pub(super) async fn handle_yt_comments_local(app: &App, mpv: &Mpv) -> Result<()> {
+    let comments: String = comments::get(app)
+        .await?
+        .render(false)
+        .chars()
+        .take(app.config.watch.local_displays_length)
+        .collect();
+
+    mpv_message(mpv, &comments, Duration::from_secs(6))?;
+    Ok(())
+}
diff --git a/crates/yt/src/watch/playlist_handler/mod.rs b/crates/yt/src/watch/playlist_handler/mod.rs
new file mode 100644
index 0000000..29b8f39
--- /dev/null
+++ b/crates/yt/src/watch/playlist_handler/mod.rs
@@ -0,0 +1,342 @@
+// yt - A fully featured command line YouTube client
+//
+// Copyright (C) 2025 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+// SPDX-License-Identifier: GPL-3.0-or-later
+//
+// This file is part of Yt.
+//
+// You should have received a copy of the License along with this program.
+// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
+
+use std::{cmp::Ordering, time::Duration};
+
+use crate::{
+    app::App,
+    storage::video_database::{
+        VideoStatus, VideoStatusMarker,
+        extractor_hash::ExtractorHash,
+        get::{self, Playlist, PlaylistIndex},
+        set,
+    },
+};
+
+use anyhow::{Context, Result};
+use libmpv2::{EndFileReason, Mpv, events::Event};
+use log::{debug, info};
+
+mod client_messages;
+
+#[derive(Debug, Clone, Copy)]
+pub enum Status {
+    /// There are no videos cached and no more marked to be watched.
+    /// Waiting is pointless.
+    NoMoreAvailable,
+
+    /// There are no videos cached, but some (> 0) are marked to be watched.
+    /// So we should wait for them to become available.
+    NoCached { marked_watch: usize },
+
+    /// There are videos cached and ready to be inserted into the playback queue.
+    Available { newly_available: usize },
+}
+
+fn mpv_message(mpv: &Mpv, message: &str, time: Duration) -> Result<()> {
+    mpv.command(
+        "show-text",
+        &[message, time.as_millis().to_string().as_str()],
+    )?;
+    Ok(())
+}
+
+async fn apply_video_options(app: &App, mpv: &Mpv, video: &ExtractorHash) -> Result<()> {
+    let options = get::video_mpv_opts(app, video).await?;
+    let video = get::video_by_hash(app, video).await?;
+
+    mpv.set_property("speed", options.playback_speed)?;
+
+    // We already start at 0, so setting it twice adds an uncomfortable skip sound.
+    if video.watch_progress.as_secs() != 0 {
+        mpv.set_property(
+            "time-pos",
+            i64::try_from(video.watch_progress.as_secs()).expect("This should not overflow"),
+        )?;
+    }
+    Ok(())
+}
+
+async fn mark_video_watched(app: &App, mpv: &Mpv) -> Result<()> {
+    let current_video = get::currently_focused_video(app)
+        .await?
+        .expect("This should be some at this point");
+
+    debug!(
+        "playlist handler will mark video '{}' watched.",
+        current_video.title
+    );
+
+    save_watch_progress(app, mpv).await?;
+
+    set::video_watched(app, &current_video.extractor_hash).await?;
+
+    Ok(())
+}
+
+/// Saves the `watch_progress` of the currently focused video.
+pub(super) async fn save_watch_progress(app: &App, mpv: &Mpv) -> Result<()> {
+    let current_video = get::currently_focused_video(app)
+        .await?
+        .expect("This should be some at this point");
+    let watch_progress = u32::try_from(
+        mpv.get_property::<i64>("time-pos")
+            .context("Failed to get the watchprogress of the currently playling video")?,
+    )
+    .expect("This conversion should never fail as the `time-pos` property is positive");
+
+    debug!(
+        "Setting the watch progress for the current_video '{}' to {watch_progress}s",
+        current_video.title_fmt_no_color()
+    );
+
+    set::video_watch_progress(app, &current_video.extractor_hash, watch_progress).await
+}
+
+/// Sync the mpv playlist with the internal playlist.
+///
+/// This takes a `maybe_playlist` argument, for the case where you have already fetched the
+/// playlist and want to reuse it instead of fetching it again.
+pub(super) async fn reload_mpv_playlist(
+    app: &App,
+    mpv: &Mpv,
+    maybe_playlist: Option<Playlist>,
+    maybe_index: Option<PlaylistIndex>,
+) -> Result<()> {
+    fn get_playlist_count(mpv: &Mpv) -> Result<usize> {
+        mpv.get_property::<i64>("playlist/count")
+            .context("Failed to get mpv playlist len")
+            .map(|count| {
+                usize::try_from(count).expect("The playlist_count should always be positive")
+            })
+    }
+
+    if get_playlist_count(mpv)? != 0 {
+        // We could also use `loadlist`, but that would require us to start a unix socket or even
+        // write all the video paths to a file beforehand.
+        mpv.command("playlist-clear", &[])?;
+        mpv.command("playlist-remove", &["current"])?;
+    }
+
+    assert_eq!(
+        get_playlist_count(mpv)?,
+        0,
+        "The playlist should be empty at this point."
+    );
+
+    let playlist = if let Some(p) = maybe_playlist {
+        p
+    } else {
+        get::playlist(app).await?
+    };
+
+    debug!("Will add {} videos to playlist.", playlist.len());
+    playlist.into_iter().try_for_each(|cache_path| {
+        mpv.command(
+            "loadfile",
+            &[
+                cache_path.to_str().with_context(|| {
+                    format!(
+                        "Failed to parse the video cache path ('{}') as valid utf8",
+                        cache_path.display()
+                    )
+                })?,
+                "append-play",
+            ],
+        )?;
+
+        Ok::<(), anyhow::Error>(())
+    })?;
+
+    let index = if let Some(index) = maybe_index {
+        let index = usize::from(index);
+        let playlist_length = get_playlist_count(mpv)?;
+
+        match index.cmp(&playlist_length) {
+            Ordering::Greater => {
+                unreachable!(
+                    "The index '{index}' execeeds the playlist length '{playlist_length}'."
+                );
+            }
+            Ordering::Less => index,
+            Ordering::Equal => {
+                // The index points to the end of the playlist. We could either go to the
+                // second-to-last entry (i.e., one entry back) or wrap around to the start.
+                // We wrap around:
+                0
+            }
+        }
+    } else {
+        get::current_playlist_index(app)
+            .await?
+            .map_or(0, usize::from)
+    };
+    mpv.set_property("playlist-pos", index.to_string().as_str())?;
+
+    Ok(())
+}
+
+/// Return the status of the playback queue
+pub async fn status(app: &App) -> Result<Status> {
+    let playlist = get::playlist(app).await?;
+
+    let playlist_len = playlist.len();
+    let marked_watch_num = get::videos(app, &[VideoStatusMarker::Watch]).await?.len();
+
+    if playlist_len == 0 && marked_watch_num == 0 {
+        Ok(Status::NoMoreAvailable)
+    } else if playlist_len == 0 && marked_watch_num != 0 {
+        Ok(Status::NoCached {
+            marked_watch: marked_watch_num,
+        })
+    } else if playlist_len != 0 {
+        Ok(Status::Available {
+            newly_available: playlist_len,
+        })
+    } else {
+        unreachable!(
+            "The playlist length is {playlist_len}, but the number of marked watch videos is {marked_watch_num}! This is a bug."
+        );
+    }
+}
+
+/// # Returns
+/// This will return [`true`] if the event handling should be stopped.
+///
+/// # Panics
+/// Only if internal assertions fail.
+#[allow(clippy::too_many_lines)]
+pub async fn handle_mpv_event(app: &App, mpv: &Mpv, event: &Event<'_>) -> Result<bool> {
+    match event {
+        Event::EndFile(r) => match r.reason {
+            EndFileReason::Eof => {
+                info!("Mpv reached the end of the current video. Marking it watched.");
+                mark_video_watched(app, mpv).await?;
+                reload_mpv_playlist(app, mpv, None, None).await?;
+            }
+            EndFileReason::Stop => {
+                // This reason is ambiguous: it is emitted both when a video is actually
+                // paused and when mpv advances to the next playlist entry.
+                // It is also emitted when a video is removed from the playlist (at least
+                // via "playlist-remove current").
+                info!("Paused video (or went to next playlist entry); doing nothing");
+            }
+            EndFileReason::Quit => {
+                info!("Mpv quit. Exiting playback");
+
+                save_watch_progress(app, mpv).await?;
+
+                return Ok(true);
+            }
+            EndFileReason::Error => {
+                unreachable!("This should have been raised as a separate error")
+            }
+            EndFileReason::Redirect => {
+                // TODO: We probably need to handle this somehow <2025-02-17>
+            }
+        },
+        Event::StartFile(_) => {
+            let mpv_pos = usize::try_from(mpv.get_property::<i64>("playlist-pos")?)
+                .expect("The value is strictly positive");
+
+            let next_video = {
+                let yt_pos = get::current_playlist_index(app).await?.map(usize::from);
+
+                if (Some(mpv_pos) != yt_pos) || yt_pos.is_none() {
+                    let playlist = get::playlist(app).await?;
+                    let video = playlist
+                        .get(PlaylistIndex::from(mpv_pos))
+                        .expect("The mpv pos should not be out of bounds");
+
+                    set::focused(
+                        app,
+                        &video.extractor_hash,
+                        get::currently_focused_video(app)
+                            .await?
+                            .as_ref()
+                            .map(|v| &v.extractor_hash),
+                    )
+                    .await?;
+
+                    video.extractor_hash
+                } else {
+                    get::currently_focused_video(app)
+                        .await?
+                        .expect("We have a focused video")
+                        .extractor_hash
+                }
+            };
+
+            apply_video_options(app, mpv, &next_video).await?;
+        }
+        Event::Seek => {
+            save_watch_progress(app, mpv).await?;
+        }
+        Event::ClientMessage(a) => {
+            debug!("Got Client Message event: '{}'", a.join(" "));
+
+            match a.as_slice() {
+                &["yt-comments-external"] => {
+                    client_messages::handle_yt_comments_external(app).await?;
+                }
+                &["yt-comments-local"] => {
+                    client_messages::handle_yt_comments_local(app, mpv).await?;
+                }
+
+                &["yt-description-external"] => {
+                    client_messages::handle_yt_description_external(app).await?;
+                }
+                &["yt-description-local"] => {
+                    client_messages::handle_yt_description_local(app, mpv).await?;
+                }
+
+                &["yt-mark-picked"] => {
+                    let current_video = get::currently_focused_video(app)
+                        .await?
+                        .expect("This should exist at this point");
+                    let current_index = get::current_playlist_index(app)
+                        .await?
+                        .expect("This should exist, as we can mark this video picked");
+
+                    save_watch_progress(app, mpv).await?;
+
+                    set::video_status(
+                        app,
+                        &current_video.extractor_hash,
+                        VideoStatus::Pick,
+                        Some(current_video.priority),
+                    )
+                    .await?;
+
+                    reload_mpv_playlist(app, mpv, None, Some(current_index)).await?;
+                    mpv_message(mpv, "Marked the video as picked", Duration::from_secs(3))?;
+                }
+                &["yt-mark-watched"] => {
+                    let current_index = get::current_playlist_index(app)
+                        .await?
+                        .expect("This should exist, as we can mark this video picked");
+                    mark_video_watched(app, mpv).await?;
+
+                    reload_mpv_playlist(app, mpv, None, Some(current_index)).await?;
+                    mpv_message(mpv, "Marked the video watched", Duration::from_secs(3))?;
+                }
+                &["yt-check-new-videos"] => {
+                    reload_mpv_playlist(app, mpv, None, None).await?;
+                }
+                other => {
+                    debug!("Unknown message: {}", other.join(" "));
+                }
+            }
+        }
+        _ => {}
+    }
+
+    Ok(false)
+}
diff --git a/crates/yt_dlp/Cargo.toml b/crates/yt_dlp/Cargo.toml
index 1d34371..3632b23 100644
--- a/crates/yt_dlp/Cargo.toml
+++ b/crates/yt_dlp/Cargo.toml
@@ -10,7 +10,7 @@
 
 [package]
 name = "yt_dlp"
-description = "A wrapper around the python yt_dlp library"
+description = "A rust ffi wrapper library for the python yt_dlp library"
 keywords = []
 categories = []
 version.workspace = true
@@ -19,19 +19,25 @@ authors.workspace = true
 license.workspace = true
 repository.workspace = true
 rust-version.workspace = true
-publish = false
+publish = true
 
 [dependencies]
-pyo3 = { version = "0.23.3", features = ["auto-initialize"] }
-bytes.workspace = true
+curl = "0.4.48"
+indexmap = { version = "2.9.0", default-features = false }
 log.workspace = true
-serde.workspace = true
+rustpython = { git = "https://github.com/RustPython/RustPython.git", rev = "6a992d4f", features = [
+  "threading",
+  "stdlib",
+  "stdio",
+  "freeze-stdlib",
+  "importlib",
+  "ssl",
+], default-features = false }
+serde = { workspace = true, features = ["derive"] }
 serde_json.workspace = true
+thiserror = "2.0.12"
 url.workspace = true
 
-[dev-dependencies]
-tokio.workspace = true
-
 [lints]
 workspace = true
 
diff --git a/crates/yt_dlp/README.md b/crates/yt_dlp/README.md
index 591ef2e..ece8540 100644
--- a/crates/yt_dlp/README.md
+++ b/crates/yt_dlp/README.md
@@ -12,7 +12,7 @@ If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
 
 # Yt_py
 
-> \[can be empty\]
+> [can be empty]
 
 Some text about the project.
 
diff --git a/crates/yt_dlp/src/duration.rs b/crates/yt_dlp/src/duration.rs
deleted file mode 100644
index 19181a5..0000000
--- a/crates/yt_dlp/src/duration.rs
+++ /dev/null
@@ -1,78 +0,0 @@
-// yt - A fully featured command line YouTube client
-//
-// Copyright (C) 2024 Benedikt Peetz <benedikt.peetz@b-peetz.de>
-// SPDX-License-Identifier: GPL-3.0-or-later
-//
-// This file is part of Yt.
-//
-// You should have received a copy of the License along with this program.
-// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
-
-// TODO: This file should be de-duplicated with the same file in the 'yt' crate <2024-06-25>
-
-#[derive(Debug, Clone, Copy)]
-pub struct Duration {
-    time: u32,
-}
-
-impl From<&str> for Duration {
-    fn from(v: &str) -> Self {
-        let buf: Vec<_> = v.split(':').take(2).collect();
-        Self {
-            time: (buf[0].parse::<u32>().expect("Should be a number") * 60)
-                + buf[1].parse::<u32>().expect("Should be a number"),
-        }
-    }
-}
-
-impl From<Option<f64>> for Duration {
-    fn from(value: Option<f64>) -> Self {
-        Self {
-            #[allow(
-                clippy::cast_possible_truncation,
-                clippy::cast_precision_loss,
-                clippy::cast_sign_loss
-            )]
-            time: value.unwrap_or(0.0).ceil() as u32,
-        }
-    }
-}
-
-impl std::fmt::Display for Duration {
-    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> Result<(), std::fmt::Error> {
-        const SECOND: u32 = 1;
-        const MINUTE: u32 = 60 * SECOND;
-        const HOUR: u32 = 60 * MINUTE;
-
-        let base_hour = self.time - (self.time % HOUR);
-        let base_min = (self.time % HOUR) - ((self.time % HOUR) % MINUTE);
-        let base_sec = (self.time % HOUR) % MINUTE;
-
-        let h = base_hour / HOUR;
-        let m = base_min / MINUTE;
-        let s = base_sec / SECOND;
-
-        if self.time == 0 {
-            write!(f, "0s")
-        } else if h > 0 {
-            write!(f, "{h}h {m}m")
-        } else {
-            write!(f, "{m}m {s}s")
-        }
-    }
-}
-#[cfg(test)]
-mod test {
-    use super::Duration;
-
-    #[test]
-    fn test_display_duration_1h() {
-        let dur = Duration { time: 60 * 60 };
-        assert_eq!("1h 0m".to_owned(), dur.to_string());
-    }
-    #[test]
-    fn test_display_duration_30min() {
-        let dur = Duration { time: 60 * 30 };
-        assert_eq!("30m 0s".to_owned(), dur.to_string());
-    }
-}
diff --git a/crates/yt_dlp/src/info_json.rs b/crates/yt_dlp/src/info_json.rs
new file mode 100644
index 0000000..31f4a69
--- /dev/null
+++ b/crates/yt_dlp/src/info_json.rs
@@ -0,0 +1,60 @@
+// yt - A fully featured command line YouTube client
+//
+// Copyright (C) 2025 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+// SPDX-License-Identifier: GPL-3.0-or-later
+//
+// This file is part of Yt.
+//
+// You should have received a copy of the License along with this program.
+// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
+
+use rustpython::vm::{
+    PyRef, VirtualMachine,
+    builtins::{PyDict, PyStr},
+};
+
+pub type InfoJson = serde_json::Map<String, serde_json::Value>;
+
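+/// Turn a [`serde_json`] map into a Python `dict` by round-tripping it through the
+/// interpreter's own `json` module.
+///
+/// # Panics
+/// If the embedded `json` module cannot be imported or the round-trip fails.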
+pub fn json_loads(
+    input: serde_json::Map<String, serde_json::Value>,
+    vm: &VirtualMachine,
+) -> PyRef<PyDict> {
+    let json = vm.import("json", 0).expect("Module exists");
+    let loads = json.get_attr("loads", vm).expect("Method exists");
+    let self_str = serde_json::to_string(&serde_json::Value::Object(input)).expect("Valid json");
+    let dict = loads
+        .call((self_str,), vm)
+        .expect("Vaild json is always a valid dict");
+
+    dict.downcast().expect("Should always be a dict")
+}
+
+/// # Panics
+/// If expectations about python operations fail.
+pub fn json_dumps(
+    input: PyRef<PyDict>,
+    vm: &VirtualMachine,
+) -> serde_json::Map<String, serde_json::Value> {
+    let json = vm.import("json", 0).expect("Module exists");
+    let dumps = json.get_attr("dumps", vm).expect("Method exists");
+    let dict = dumps
+        .call((input,), vm)
+        .map_err(|err| vm.print_exception(err))
+        .expect("Might not always work, but for our dicts it works");
+
+    let string: PyRef<PyStr> = dict.downcast().expect("Should always be a string");
+
+    let real_string = string.to_str().expect("Should be valid utf8");
+
+    // {
+    //     let mut file = File::create("debug.dump.json").unwrap();
+    //     write!(file, "{}", real_string).unwrap();
+    // }
+
+    let value: serde_json::Value = serde_json::from_str(real_string).expect("Should be valid json");
+
+    match value {
+        serde_json::Value::Object(map) => map,
+        _ => unreachable!("json.dumps of a dict should always parse back into a json object"),
+    }
+}
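+
+// A rough round-trip sketch of the two helpers above (assuming a `vm: &VirtualMachine`
+// obtained from `Interpreter::enter`; the key is illustrative only):
+//
+//     let serde_json::Value::Object(map) = serde_json::json!({ "id": "abc" }) else {
+//         unreachable!("a json object literal always becomes a map");
+//     };
+//     let dict = json_loads(map, vm);
+//     let back = json_dumps(dict, vm);
+//     assert_eq!(back.get("id").and_then(|v| v.as_str()), Some("abc"));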
diff --git a/crates/yt_dlp/src/lib.rs b/crates/yt_dlp/src/lib.rs
index 970bfe2..a03e444 100644
--- a/crates/yt_dlp/src/lib.rs
+++ b/crates/yt_dlp/src/lib.rs
@@ -1,6 +1,6 @@
 // yt - A fully featured command line YouTube client
 //
-// Copyright (C) 2024 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+// Copyright (C) 2025 Benedikt Peetz <benedikt.peetz@b-peetz.de>
 // SPDX-License-Identifier: GPL-3.0-or-later
 //
 // This file is part of Yt.
@@ -8,461 +8,322 @@
 // You should have received a copy of the License along with this program.
 // If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
 
-// The pyo3 `pyfunction` proc-macros call unsafe functions internally, which trigger this lint.
-#![allow(unsafe_op_in_unsafe_fn)]
-#![allow(clippy::missing_errors_doc)]
+//! The `yt_dlp` interface is completely contained in the [`YoutubeDL`] structure.
 
-use std::env;
-use std::io::stdout;
-use std::{fs::File, io::Write};
+use std::path::PathBuf;
 
-use std::{path::PathBuf, sync::Once};
-
-use crate::{duration::Duration, logging::setup_logging, wrapper::info_json::InfoJson};
-
-use bytes::Bytes;
-use log::{info, log_enabled, Level};
-use pyo3::types::{PyString, PyTuple, PyTupleMethods};
-use pyo3::{
-    pyfunction,
-    types::{PyAnyMethods, PyDict, PyDictMethods, PyList, PyListMethods, PyModule},
-    wrap_pyfunction, Bound, PyAny, PyResult, Python,
+use indexmap::IndexMap;
+use log::info;
+use rustpython::vm::{
+    Interpreter, PyObjectRef, PyRef, VirtualMachine,
+    builtins::{PyDict, PyList, PyStr},
+    function::{FuncArgs, KwArgs, PosArgs},
 };
-use serde::Serialize;
-use serde_json::{Map, Value};
 use url::Url;
 
-pub mod duration;
-pub mod logging;
-pub mod wrapper;
-
-#[cfg(test)]
-mod tests;
-
-/// Synchronisation helper, to ensure that we don't setup the logger multiple times
-static SYNC_OBJ: Once = Once::new();
-
-/// Add a logger to the yt-dlp options.
-/// If you have an logger set (i.e. for rust), than this will log to rust
-///
-/// # Panics
-/// This should never panic.
-pub fn add_logger_and_sig_handler<'a>(
-    opts: Bound<'a, PyDict>,
-    py: Python<'_>,
-) -> PyResult<Bound<'a, PyDict>> {
-    setup_logging(py, "yt_dlp")?;
-
-    let logging = PyModule::import(py, "logging")?;
-    let ytdl_logger = logging.call_method1("getLogger", ("yt_dlp",))?;
-
-    // Ensure that all events are logged by setting the log level to NOTSET (we filter on rust's side)
-    // Also use this static, to ensure that we don't configure the logger every time
-    SYNC_OBJ.call_once(|| {
-        // Disable the SIGINT (Ctrl+C) handler, python installs.
-        // This allows the user to actually stop the application with Ctrl+C.
-        // This is here because it can only be run in the main thread and this was here already.
-        py.run(
-            c"\
-import signal
-signal.signal(signal.SIGINT, signal.SIG_DFL)",
-            None,
-            None,
-        )
-        .expect("This code should always work");
-
-        let config_opts = PyDict::new(py);
-        config_opts
-            .set_item("level", 0)
-            .expect("Setting this item should always work");
-
-        logging
-            .call_method("basicConfig", (), Some(&config_opts))
-            .expect("This method exists");
-    });
-
-    // This was taken from `ytcc`, I don't think it is still applicable
-    // ytdl_logger.setattr("propagate", false)?;
-    // let logging_null_handler = logging.call_method0("NullHandler")?;
-    // ytdl_logger.setattr("addHandler", logging_null_handler)?;
-
-    opts.set_item("logger", ytdl_logger).expect("Should work");
-
-    Ok(opts)
+use crate::{
+    info_json::{InfoJson, json_dumps, json_loads},
+    python_error::PythonError,
+};
+
+pub mod info_json;
+pub mod options;
+pub mod post_processors;
+pub mod progress_hook;
+pub mod python_error;
+
+mod logging;
+mod package_hacks;
+
+#[macro_export]
+macro_rules! json_get {
+    ($value:expr, $name:literal, $into:ident) => {{
+        match $value.get($name) {
+            Some(val) => $crate::json_cast!(val, $into),
+            None => panic!(
+                concat!(
+                    "Expected '",
+                    $name,
+                    "' to be a key for the'",
+                    stringify!($value),
+                    "' object: {:#?}"
+                ),
+                $value
+            ),
+        }
+    }};
 }
 
-#[pyfunction]
-#[allow(clippy::too_many_lines)]
-#[allow(clippy::missing_panics_doc)]
-#[allow(clippy::items_after_statements)]
-#[allow(
-    clippy::cast_possible_truncation,
-    clippy::cast_sign_loss,
-    clippy::cast_precision_loss
-)]
-pub fn progress_hook(py: Python<'_>, input: &Bound<'_, PyDict>) -> PyResult<()> {
-    // Only add the handler, if the log-level is higher than Debug (this avoids covering debug
-    // messages).
-    if log_enabled!(Level::Debug) {
-        return Ok(());
-    }
+#[macro_export]
+macro_rules! json_cast {
+    ($value:expr, $into:ident) => {{
+        match $value.$into() {
+            Some(result) => result,
+            None => panic!(
+                concat!(
+                    "Expected to be able to cast value ({:#?}) ",
+                    stringify!($into)
+                ),
+                $value
+            ),
+        }
+    }};
+}
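+
+// A short usage sketch of the two helper macros above (`info` is assumed to be an
+// `InfoJson`, e.g. the return value of `YoutubeDL::extract_info`):
+//
+//     let title: &str = json_get!(info, "title", as_str);
+//     let duration = info.get("duration").map(|val| json_cast!(val, as_f64));
+//
+// Both macros panic with a descriptive message instead of returning an `Option`,
+// which keeps the call sites in this crate terse.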
 
-    // ANSI ESCAPE CODES Wrappers {{{
-    // see: https://en.wikipedia.org/wiki/ANSI_escape_code#Control_Sequence_Introducer_commands
-    const CSI: &str = "\x1b[";
-    fn clear_whole_line() {
-        print!("{CSI}2K");
-    }
-    fn move_to_col(x: usize) {
-        print!("{CSI}{x}G");
-    }
-    // }}}
-
-    let input: Map<String, Value> = serde_json::from_str(&json_dumps(
-        py,
-        input
-            .downcast::<PyAny>()
-            .expect("Will always work")
-            .to_owned(),
-    )?)
-    .expect("Python should always produce valid json");
-
-    macro_rules! get {
-        (@interrogate $item:ident, $type_fun:ident, $get_fun:ident, $name:expr) => {{
-            let a = $item.get($name).expect(concat!(
-                "The field '",
-                stringify!($name),
-                "' should exist."
-            ));
-
-            if a.$type_fun() {
-                a.$get_fun().expect(
-                    "The should have been checked in the if guard, so unpacking here is fine",
-                )
-            } else {
-                panic!(
-                    "Value {} => \n{}\n is not of type: {}",
-                    $name,
-                    a,
-                    stringify!($type_fun)
-                );
-            }
-        }};
+/// The core of the `yt_dlp` interface.
+pub struct YoutubeDL {
+    interpreter: Interpreter,
+    youtube_dl_class: PyObjectRef,
+    yt_dlp_module: PyObjectRef,
+    options: serde_json::Map<String, serde_json::Value>,
+}
 
-        ($type_fun:ident, $get_fun:ident, $name1:expr, $name2:expr) => {{
-            let a = get! {@interrogate input, is_object, as_object, $name1};
-            let b = get! {@interrogate a, $type_fun, $get_fun, $name2};
-            b
-        }};
+impl std::fmt::Debug for YoutubeDL {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        // TODO(@bpeetz): Use something useful here. <2025-06-13>
+        f.write_str("YoutubeDL")
+    }
+}
 
-        ($type_fun:ident, $get_fun:ident, $name:expr) => {{
-            get! {@interrogate input, $type_fun, $get_fun, $name}
-        }};
+impl YoutubeDL {
+    /// Fetch the underlying `yt_dlp` and `python` version.
+    ///
+    /// # Panics
+    ///
+    /// If `yt_dlp` changed the location or type of `__version__`.
+    pub fn version(&self) -> (String, String) {
+        let yt_dlp: PyRef<PyStr> = self.interpreter.enter_and_expect(
+            |vm| {
+                let version_module = self.yt_dlp_module.get_attr("version", vm)?;
+                let version = version_module.get_attr("__version__", vm)?;
+                let version = version.downcast().expect("This should always be a string");
+                Ok(version)
+            },
+            "yt_dlp version location has changed",
+        );
+
+        let python: PyRef<PyStr> = self.interpreter.enter_and_expect(
+            |vm| {
+                let version_module = vm.import("sys", 0)?;
+                let version = version_module.get_attr("version", vm)?;
+                let version = version.downcast().expect("This should always be a string");
+                Ok(version)
+            },
+            "python version location has changed",
+        );
+
+        (yt_dlp.to_string(), python.to_string())
     }
 
-    macro_rules! default_get {
-        (@interrogate $item:ident, $default:expr, $get_fun:ident, $name:expr) => {{
-            let a = if let Some(field) = $item.get($name) {
-                field.$get_fun().unwrap_or($default)
+    /// Download a given list of URLs.
+    /// Returns the paths they were downloaded to.
+    ///
+    /// # Errors
+    /// If one of the downloads errors.
+    pub fn download(&self, urls: &[Url]) -> Result<Vec<PathBuf>, extract_info::Error> {
+        let mut out_paths = Vec::with_capacity(urls.len());
+
+        for url in urls {
+            info!("Started downloading url: '{url}'");
+            let info_json = self.extract_info(url, true, true)?;
+
+            // Try to work around yt-dlp type weirdness
+            let result_string = if let Some(filename) = info_json.get("filename") {
+                PathBuf::from(json_cast!(filename, as_str))
             } else {
-                $default
+                PathBuf::from(json_get!(
+                    json_cast!(
+                        json_get!(info_json, "requested_downloads", as_array)[0],
+                        as_object
+                    ),
+                    "filename",
+                    as_str
+                ))
             };
-            a
-        }};
-
-        ($get_fun:ident, $default:expr, $name1:expr, $name2:expr) => {{
-            let a = get! {@interrogate input, is_object, as_object, $name1};
-            let b = default_get! {@interrogate a, $default, $get_fun, $name2};
-            b
-        }};
-
-        ($get_fun:ident, $default:expr, $name:expr) => {{
-            default_get! {@interrogate input, $default, $get_fun, $name}
-        }};
-    }
 
-    macro_rules! c {
-        ($color:expr, $format:expr) => {
-            format!("\x1b[{}m{}\x1b[0m", $color, $format)
-        };
-    }
-
-    fn format_bytes(bytes: u64) -> String {
-        let bytes = Bytes::new(bytes);
-        bytes.to_string()
-    }
+            out_paths.push(result_string);
+            info!("Finished downloading url");
+        }
 
-    fn format_speed(speed: f64) -> String {
-        #[allow(clippy::cast_possible_truncation, clippy::cast_sign_loss)]
-        let bytes = Bytes::new(speed.floor() as u64);
-        format!("{bytes}/s")
+        Ok(out_paths)
     }
 
-    let get_title = |add_extension: bool| -> String {
-        match get! {is_string, as_str, "info_dict", "ext"} {
-            "vtt" => {
-                format!(
-                    "Subtitles ({})",
-                    default_get! {as_str, "<No Subtitle Language>", "info_dict", "name"}
-                )
-            }
-            title_extension @ ("webm" | "mp4" | "m4a") => {
-                if add_extension {
-                    format!(
-                        "{} ({})",
-                        default_get! { as_str, "<No title>", "info_dict", "title"},
-                        title_extension
-                    )
+    /// `extract_info(self, url, download=True, ie_key=None, extra_info=None, process=True, force_generic_extractor=False)`
+    ///
+    /// Extract and return the information dictionary of the URL
+    ///
+    /// Arguments:
+    /// - `url`          URL to extract
+    ///
+    /// Keyword arguments:
+    /// - `download`     Whether to download videos
+    /// - `process`      Whether to resolve all unresolved references (URLs, playlist items).
+    ///                  Must be `true` for `download` to work.
+    ///
+    /// # Panics
+    /// If expectations about python fail to hold.
+    ///
+    /// # Errors
+    /// If python operations fail.
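+    ///
+    /// # Example (sketch)
+    ///
+    /// A minimal, illustrative call; the URL is a placeholder and constructing the
+    /// [`YoutubeDL`] instance is assumed to happen elsewhere (e.g. via
+    /// `YoutubeDLOptions::build`):
+    ///
+    /// ```no_run
+    /// # fn demo(ydl: &yt_dlp::YoutubeDL) -> Result<(), Box<dyn std::error::Error>> {
+    /// let url: url::Url = "https://www.youtube.com/watch?v=dQw4w9WgXcQ".parse()?;
+    /// // Only fetch and resolve the metadata, do not download the video itself.
+    /// let info = ydl.extract_info(&url, false, true)?;
+    /// println!("title: {:?}", info.get("title"));
+    /// # Ok(())
+    /// # }
+    /// ```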
+    pub fn extract_info(
+        &self,
+        url: &Url,
+        download: bool,
+        process: bool,
+    ) -> Result<InfoJson, extract_info::Error> {
+        self.interpreter.enter(|vm| {
+            let pos_args = PosArgs::new(vec![vm.new_pyobj(url.to_string())]);
+
+            let kw_args = KwArgs::new({
+                let mut map = IndexMap::new();
+                map.insert("download".to_owned(), vm.new_pyobj(download));
+                map.insert("process".to_owned(), vm.new_pyobj(process));
+                map
+            });
+
+            let fun_args = FuncArgs::new(pos_args, kw_args);
+
+            let inner = self
+                .youtube_dl_class
+                .get_attr("extract_info", vm)
+                .map_err(|exc| PythonError::from_exception(vm, &exc))?;
+            let result = inner
+                .call_with_args(fun_args, vm)
+                .map_err(|exc| PythonError::from_exception(vm, &exc))?
+                .downcast::<PyDict>()
+                .expect("This is a dict");
+
+            // Resolve the generator object
+            if let Ok(generator) = result.get_item("entries", vm) {
+                if generator.payload_is::<PyList>() {
+                    // Already resolved; do nothing.
                 } else {
-                    default_get! { as_str, "<No title>", "info_dict", "title"}.to_owned()
-                }
-            }
-            other => panic!("The extension '{other}' is not yet implemented"),
-        }
-    };
-
-    match get! {is_string, as_str, "status"} {
-        "downloading" => {
-            let elapsed = default_get! {as_f64, 0.0f64, "elapsed"};
-            let eta = default_get! {as_f64, 0.0, "eta"};
-            let speed = default_get! {as_f64, 0.0, "speed"};
-
-            let downloaded_bytes = get! {is_u64, as_u64, "downloaded_bytes"};
-            let (total_bytes, bytes_is_estimate): (u64, &'static str) = {
-                let total_bytes = default_get!(as_u64, 0, "total_bytes");
-                if total_bytes == 0 {
-                    let maybe_estimate = default_get!(as_u64, 0, "total_bytes_estimate");
-
-                    if maybe_estimate == 0 {
-                        // The download speed should be in bytes per second and the eta in seconds.
-                        // Thus multiplying them gets us the raw bytes (which were estimated by `yt_dlp`, from their `info.json`)
-                        let bytes_still_needed = (speed * eta).ceil() as u64;
-
-                        (downloaded_bytes + bytes_still_needed, "~")
-                    } else {
-                        (maybe_estimate, "~")
+                    let max_backlog = self.options.get("playlistend").map_or(10_000, |value| {
+                        usize::try_from(value.as_u64().expect("`playlistend` should be a positive integer"))
+                            .expect("`playlistend` should fit into a usize")
+                    });
+
+                    let mut out = vec![];
+                    let next = generator
+                        .get_attr("__next__", vm)
+                        .map_err(|exc| PythonError::from_exception(vm, &exc))?;
+                    while let Ok(output) = next.call((), vm) {
+                        out.push(output);
+
+                        if out.len() == max_backlog {
+                            break;
+                        }
                     }
-                } else {
-                    (total_bytes, "")
+                    result
+                        .set_item("entries", vm.new_pyobj(out), vm)
+                        .map_err(|exc| PythonError::from_exception(vm, &exc))?;
                 }
-            };
-            let percent: f64 = {
-                if total_bytes == 0 {
-                    100.0
-                } else {
-                    (downloaded_bytes as f64 / total_bytes as f64) * 100.0
-                }
-            };
-
-            clear_whole_line();
-            move_to_col(1);
-
-            print!(
-                "'{}' [{}/{} at {}] -> [{} of {}{} {}] ",
-                c!("34;1", get_title(true)),
-                c!("33;1", Duration::from(Some(elapsed))),
-                c!("33;1", Duration::from(Some(eta))),
-                c!("32;1", format_speed(speed)),
-                c!("31;1", format_bytes(downloaded_bytes)),
-                c!("31;1", bytes_is_estimate),
-                c!("31;1", format_bytes(total_bytes)),
-                c!("36;1", format!("{:.02}%", percent))
-            );
-            stdout().flush()?;
-        }
-        "finished" => {
-            println!("-> Finished downloading.");
-        }
-        "error" => {
-            panic!("-> Error while downloading: {}", get_title(true))
-        }
-        other => unreachable!("'{other}' should not be a valid state!"),
-    };
-
-    Ok(())
-}
-
-pub fn add_hooks<'a>(opts: Bound<'a, PyDict>, py: Python<'_>) -> PyResult<Bound<'a, PyDict>> {
-    if let Some(hooks) = opts.get_item("progress_hooks")? {
-        let hooks = hooks.downcast::<PyList>()?;
-        hooks.append(wrap_pyfunction!(progress_hook, py)?)?;
-
-        opts.set_item("progress_hooks", hooks)?;
-    } else {
-        // No hooks are set yet
-        let hooks_list = PyList::new(py, &[wrap_pyfunction!(progress_hook, py)?])?;
-
-        opts.set_item("progress_hooks", hooks_list)?;
-    }
-
-    Ok(opts)
-}
-
-/// `extract_info(self, url, download=True, ie_key=None, extra_info=None, process=True, force_generic_extractor=False)`
-///
-/// Extract and return the information dictionary of the URL
-///
-/// Arguments:
-/// @param url          URL to extract
-///
-/// Keyword arguments:
-/// @param download     Whether to download videos
-/// @param process      Whether to resolve all unresolved references (URLs, playlist items).
-///                     Must be True for download to work
-/// @param `ie_key`       Use only the extractor with this key
-///
-/// @param `extra_info`   Dictionary containing the extra values to add to the info (For internal use only)
-/// @`force_generic_extractor`  Force using the generic extractor (Deprecated; use `ie_key`='Generic')
-#[allow(clippy::unused_async)]
-#[allow(clippy::missing_panics_doc)]
-pub async fn extract_info(
-    yt_dlp_opts: &Map<String, Value>,
-    url: &Url,
-    download: bool,
-    process: bool,
-) -> PyResult<InfoJson> {
-    Python::with_gil(|py| {
-        let opts = json_map_to_py_dict(yt_dlp_opts, py)?;
-
-        let instance = get_yt_dlp(py, opts)?;
-        let args = (url.as_str(),);
-
-        let kwargs = PyDict::new(py);
-        kwargs.set_item("download", download)?;
-        kwargs.set_item("process", process)?;
-
-        let result = instance.call_method("extract_info", args, Some(&kwargs))?;
-
-        // Remove the `<generator at 0xsome_hex>`, by setting it to null
-        if !process {
-            result.set_item("entries", ())?;
-        }
-
-        let result_str = json_dumps(py, result)?;
-
-        if let Ok(confirm) = env::var("YT_STORE_INFO_JSON") {
-            if confirm == "yes" {
-                let mut file = File::create("output.info.json")?;
-                write!(file, "{result_str}").unwrap();
             }
-        }
 
-        Ok(serde_json::from_str(&result_str)
-            .expect("Python should be able to produce correct json"))
-    })
-}
-
-/// # Panics
-/// Only if python fails to return a valid URL.
-pub fn unsmuggle_url(smug_url: &Url) -> PyResult<Url> {
-    Python::with_gil(|py| {
-        let utils = get_yt_dlp_utils(py)?;
-        let url = utils
-            .call_method1("unsmuggle_url", (smug_url.as_str(),))?
-            .downcast::<PyTuple>()?
-            .get_item(0)?;
-
-        let url: Url = url
-            .downcast::<PyString>()?
-            .to_string()
-            .parse()
-            .expect("Python should be able to return a valid url");
-
-        Ok(url)
-    })
-}
+            let result = self.prepare_info_json(result, vm)?;
 
-/// Download a given list of URLs.
-/// Returns the paths they were downloaded to.
-///
-/// # Panics
-/// Only if `yt_dlp` changes their `info_json` schema.
-pub async fn download(
-    urls: &[Url],
-    download_options: &Map<String, Value>,
-) -> PyResult<Vec<PathBuf>> {
-    let mut out_paths = Vec::with_capacity(urls.len());
-
-    for url in urls {
-        info!("Started downloading url: '{}'", url);
-        let info_json = extract_info(download_options, url, true, true).await?;
-
-        // Try to work around yt-dlp type weirdness
-        let result_string = if let Some(filename) = info_json.filename {
-            filename
-        } else {
-            info_json.requested_downloads.expect("This must exist")[0]
-                .filename
-                .clone()
-        };
-
-        out_paths.push(result_string);
-        info!("Finished downloading url: '{}'", url);
+            Ok(result)
+        })
     }
 
-    Ok(out_paths)
-}
-
-fn json_map_to_py_dict<'a>(
-    map: &Map<String, Value>,
-    py: Python<'a>,
-) -> PyResult<Bound<'a, PyDict>> {
-    let json_string = serde_json::to_string(&map).expect("This must always work");
-
-    let python_dict = json_loads(py, json_string)?;
-
-    Ok(python_dict)
-}
-
-fn json_dumps(py: Python<'_>, input: Bound<'_, PyAny>) -> PyResult<String> {
-    //     json.dumps(yt_dlp.sanitize_info(input))
-
-    let yt_dlp = get_yt_dlp(py, PyDict::new(py))?;
-    let sanitized_result = yt_dlp.call_method1("sanitize_info", (input,))?;
-
-    let json = PyModule::import(py, "json")?;
-    let dumps = json.getattr("dumps")?;
+    /// Take the (potentially modified) result of the information extractor (i.e.,
+    /// [`Self::extract_info`] with `process` and `download` set to false) and resolve
+    /// all unresolved references (URLs, playlist items).
+    ///
+    /// It will also download the videos if `download` is true.
+    /// Returns the resolved `ie_result`.
+    ///
+    /// # Panics
+    /// If expectations about python fail to hold.
+    ///
+    /// # Errors
+    /// If python operations fail.
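+    ///
+    /// # Example (sketch)
+    ///
+    /// An illustrative two-step flow: first extract without processing, then resolve
+    /// (and optionally download) the references in a second step:
+    ///
+    /// ```no_run
+    /// # fn demo(ydl: &yt_dlp::YoutubeDL) -> Result<(), Box<dyn std::error::Error>> {
+    /// # let url: url::Url = "https://www.youtube.com/watch?v=dQw4w9WgXcQ".parse()?;
+    /// let unresolved = ydl.extract_info(&url, false, false)?;
+    /// // ... the unresolved info json could be inspected or modified here ...
+    /// let resolved = ydl.process_ie_result(unresolved, false)?;
+    /// # drop(resolved);
+    /// # Ok(())
+    /// # }
+    /// ```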
+    pub fn process_ie_result(
+        &self,
+        ie_result: InfoJson,
+        download: bool,
+    ) -> Result<InfoJson, process_ie_result::Error> {
+        self.interpreter.enter(|vm| {
+            let pos_args = PosArgs::new(vec![vm.new_pyobj(json_loads(ie_result, vm))]);
+
+            let kw_args = KwArgs::new({
+                let mut map = IndexMap::new();
+                map.insert("download".to_owned(), vm.new_pyobj(download));
+                map
+            });
+
+            let fun_args = FuncArgs::new(pos_args, kw_args);
+
+            let inner = self
+                .youtube_dl_class
+                .get_attr("process_ie_result", vm)
+                .map_err(|exc| PythonError::from_exception(vm, &exc))?;
+            let result = inner
+                .call_with_args(fun_args, vm)
+                .map_err(|exc| PythonError::from_exception(vm, &exc))?
+                .downcast::<PyDict>()
+                .expect("This is a dict");
+
+            let result = self.prepare_info_json(result, vm)?;
+
+            Ok(result)
+        })
+    }
 
-    let output = dumps.call1((sanitized_result,))?;
+    fn prepare_info_json(
+        &self,
+        info: PyRef<PyDict>,
+        vm: &VirtualMachine,
+    ) -> Result<InfoJson, prepare::Error> {
+        let sanitize = self
+            .youtube_dl_class
+            .get_attr("sanitize_info", vm)
+            .map_err(|exc| PythonError::from_exception(vm, &exc))?;
 
-    let output_str = output.extract::<String>()?;
+        let value = sanitize
+            .call((info,), vm)
+            .map_err(|exc| PythonError::from_exception(vm, &exc))?;
 
-    Ok(output_str)
-}
+        let result = value.downcast::<PyDict>().expect("This should stay a dict");
 
-fn json_loads_str<T: Serialize>(py: Python<'_>, input: T) -> PyResult<Bound<'_, PyDict>> {
-    let string = serde_json::to_string(&input).expect("Correct json must be pased");
-
-    json_loads(py, string)
+        Ok(json_dumps(result, vm))
+    }
 }
 
-fn json_loads(py: Python<'_>, input: String) -> PyResult<Bound<'_, PyDict>> {
-    //     json.loads(input)
+#[allow(missing_docs)]
+pub mod process_ie_result {
+    use crate::{prepare, python_error::PythonError};
 
-    let json = PyModule::import(py, "json")?;
-    let dumps = json.getattr("loads")?;
+    #[derive(Debug, thiserror::Error)]
+    pub enum Error {
+        #[error(transparent)]
+        Python(#[from] PythonError),
 
-    let output = dumps.call1((input,))?;
-
-    Ok(output
-        .downcast::<PyDict>()
-        .expect("This should always be a PyDict")
-        .clone())
+        #[error("Failed to prepare the info json")]
+        InfoJsonPrepare(#[from] prepare::Error),
+    }
 }
+#[allow(missing_docs)]
+pub mod extract_info {
+    use crate::{prepare, python_error::PythonError};
 
-fn get_yt_dlp_utils(py: Python<'_>) -> PyResult<Bound<'_, PyAny>> {
-    let yt_dlp = PyModule::import(py, "yt_dlp")?;
-    let utils = yt_dlp.getattr("utils")?;
+    #[derive(Debug, thiserror::Error)]
+    pub enum Error {
+        #[error(transparent)]
+        Python(#[from] PythonError),
 
-    Ok(utils)
+        #[error("Failed to prepare the info json")]
+        InfoJsonPrepare(#[from] prepare::Error),
+    }
 }
-fn get_yt_dlp<'a>(py: Python<'a>, opts: Bound<'a, PyDict>) -> PyResult<Bound<'a, PyAny>> {
-    // Unconditionally set a logger
-    let opts = add_logger_and_sig_handler(opts, py)?;
-    let opts = add_hooks(opts, py)?;
-
-    let yt_dlp = PyModule::import(py, "yt_dlp")?;
-    let youtube_dl = yt_dlp.call_method1("YoutubeDL", (opts,))?;
-
-    Ok(youtube_dl)
+#[allow(missing_docs)]
+pub mod prepare {
+    use crate::python_error::PythonError;
+
+    #[derive(Debug, thiserror::Error)]
+    pub enum Error {
+        #[error(transparent)]
+        Python(#[from] PythonError),
+    }
 }
diff --git a/crates/yt_dlp/src/logging.rs b/crates/yt_dlp/src/logging.rs
index 670fc1c..112836e 100644
--- a/crates/yt_dlp/src/logging.rs
+++ b/crates/yt_dlp/src/logging.rs
@@ -10,33 +10,66 @@
 
 // This file is taken from: https://github.com/dylanbstorey/pyo3-pylogger/blob/d89e0d6820ebc4f067647e3b74af59dbc4941dd5/src/lib.rs
 // It is licensed under the Apache 2.0 License, copyright up to 2024 by Dylan Storey
-// It was modified by Benedikt Peetz 2024
+// It was modified by Benedikt Peetz 2024, 2025
+
+use log::{Level, MetadataBuilder, Record, logger};
+use rustpython::vm::{
+    PyObjectRef, PyRef, PyResult, VirtualMachine,
+    builtins::{PyInt, PyStr},
+    convert::ToPyObject,
+    function::FuncArgs,
+};
 
-// The pyo3 `pyfunction` proc-macros call unsafe functions internally, which trigger this lint.
-#![allow(unsafe_op_in_unsafe_fn)]
+/// Consume a Python `logging.LogRecord` and emit a Rust `Log` instead.
+fn host_log(mut input: FuncArgs, vm: &VirtualMachine) -> PyResult<()> {
+    let record = input.args.remove(0);
+    let rust_target = {
+        let base: PyRef<PyStr> = input.args.remove(0).downcast().expect("Should be a string");
+        base.as_str().to_owned()
+    };
 
-use std::ffi::CString;
+    let level = {
+        let level: PyRef<PyInt> = record
+            .get_attr("levelno", vm)?
+            .downcast()
+            .expect("Should always be an int");
+        level.as_u32_mask()
+    };
+    let message = {
+        let get_message = record.get_attr("getMessage", vm)?;
+        let message: PyRef<PyStr> = get_message
+            .call((), vm)?
+            .downcast()
+            .expect("Downcasting works");
+
+        message.as_str().to_owned()
+    };
 
-use log::{logger, Level, MetadataBuilder, Record};
-use pyo3::{
-    prelude::{PyAnyMethods, PyListMethods, PyModuleMethods},
-    pyfunction, wrap_pyfunction, Bound, PyAny, PyResult, Python,
-};
+    let pathname = {
+        let pathname: PyRef<PyStr> = record
+            .get_attr("pathname", vm)?
+            .downcast()
+            .expect("Is a string");
 
-/// Consume a Python `logging.LogRecord` and emit a Rust `Log` instead.
-#[allow(clippy::needless_pass_by_value)]
-#[pyfunction]
-fn host_log(record: Bound<'_, PyAny>, rust_target: &str) -> PyResult<()> {
-    let level = record.getattr("levelno")?;
-    let message = record.getattr("getMessage")?.call0()?.to_string();
-    let pathname = record.getattr("pathname")?.to_string();
-    let lineno = record
-        .getattr("lineno")?
-        .to_string()
-        .parse::<u32>()
-        .expect("This should always be a u32");
-
-    let logger_name = record.getattr("name")?.to_string();
+        pathname.as_str().to_owned()
+    };
+
+    let lineno = {
+        let lineno: PyRef<PyInt> = record
+            .get_attr("lineno", vm)?
+            .downcast()
+            .expect("Is a number");
+
+        lineno.as_u32_mask()
+    };
+
+    let logger_name = {
+        let name: PyRef<PyStr> = record
+            .get_attr("name", vm)?
+            .downcast()
+            .expect("Should be a string");
+        name.as_str().to_owned()
+    };
 
     let full_target: Option<String> = if logger_name.trim().is_empty() || logger_name == "root" {
         None
@@ -47,25 +80,25 @@ fn host_log(record: Bound<'_, PyAny>, rust_target: &str) -> PyResult<()> {
         Some(format!("{rust_target}::{logger_name}"))
     };
 
-    let target = full_target.as_deref().unwrap_or(rust_target);
+    let target = full_target.as_deref().unwrap_or(&rust_target);
 
     // error
-    let error_metadata = if level.ge(40u8)? {
+    let error_metadata = if level >= 40 {
         MetadataBuilder::new()
             .target(target)
             .level(Level::Error)
             .build()
-    } else if level.ge(30u8)? {
+    } else if level >= 30 {
         MetadataBuilder::new()
             .target(target)
             .level(Level::Warn)
             .build()
-    } else if level.ge(20u8)? {
+    } else if level >= 20 {
         MetadataBuilder::new()
             .target(target)
             .level(Level::Info)
             .build()
-    } else if level.ge(10u8)? {
+    } else if level >= 10 {
         MetadataBuilder::new()
             .target(target)
             .level(Level::Debug)
@@ -97,13 +130,24 @@ fn host_log(record: Bound<'_, PyAny>, rust_target: &str) -> PyResult<()> {
 /// # Panics
 /// Only if internal assertions fail.
 #[allow(clippy::module_name_repetitions)]
-pub fn setup_logging(py: Python<'_>, target: &str) -> PyResult<()> {
-    let logging = py.import("logging")?;
+pub(super) fn setup_logging(vm: &VirtualMachine, target: &str) -> PyResult<PyObjectRef> {
+    let logging = vm.import("logging", 0)?;
 
-    logging.setattr("host_log", wrap_pyfunction!(host_log, &logging)?)?;
+    let scope = vm.new_scope_with_builtins();
 
-    py.run(
-        CString::new(format!(
+    for (key, value) in logging.dict().expect("Should be a dict") {
+        let key: PyRef<PyStr> = key.downcast().expect("Is a string");
+
+        scope.globals.set_item(key.as_str(), value, vm)?;
+    }
+    scope
+        .globals
+        .set_item("host_log", vm.new_function("host_log", host_log).into(), vm)?;
+
+    let local_scope = scope.clone();
+    vm.run_code_string(
+        local_scope,
+        format!(
             r#"
 class HostHandler(Handler):
     def __init__(self, level=0):
@@ -118,15 +162,10 @@ def basicConfig(*pargs, **kwargs):
         kwargs["handlers"] = [HostHandler()]
     return oldBasicConfig(*pargs, **kwargs)
 "#
-        ))
-        .expect("This is hardcoded")
-        .as_c_str(),
-        Some(&logging.dict()),
-        None,
+        )
+        .as_str(),
+        "<embedded logging inintializing code>".to_owned(),
     )?;
 
-    let all = logging.index()?;
-    all.append("HostHandler")?;
-
-    Ok(())
+    Ok(scope.globals.to_pyobject(vm))
 }
diff --git a/crates/yt_dlp/src/options.rs b/crates/yt_dlp/src/options.rs
new file mode 100644
index 0000000..dc3c154
--- /dev/null
+++ b/crates/yt_dlp/src/options.rs
@@ -0,0 +1,286 @@
+// yt - A fully featured command line YouTube client
+//
+// Copyright (C) 2025 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+// SPDX-License-Identifier: GPL-3.0-or-later
+//
+// This file is part of Yt.
+//
+// You should have received a copy of the License along with this program.
+// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
+
+use std::env;
+
+use indexmap::IndexMap;
+use log::{Level, debug, error, log_enabled};
+use rustpython::{
+    InterpreterConfig,
+    vm::{
+        self, PyObjectRef, PyRef, PyResult, VirtualMachine,
+        builtins::{PyBaseException, PyStr},
+        function::{FuncArgs, KwArgs, PosArgs},
+    },
+};
+
+use crate::{
+    YoutubeDL, json_loads, logging::setup_logging, package_hacks, post_processors,
+    python_error::process_exception,
+};
+
+/// Wrap your function with [`mk_python_function`].
+pub type ProgressHookFunction = fn(input: FuncArgs, vm: &VirtualMachine);
+
+pub type PostProcessorFunction = fn(vm: &VirtualMachine) -> PyResult<PyObjectRef>;
+
+/// Options that are used to customize the download behaviour.
+///
+/// In the future, this might get a builder API.
+///
+/// See `help(yt_dlp.YoutubeDL())` from Python for a full list of available options.
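+///
+/// # Example
+///
+/// A minimal usage sketch; the shown option keys/values and the import path are
+/// only illustrative and depend on how the crate re-exports this type:
+///
+/// ```ignore
+/// use yt_dlp::options::YoutubeDLOptions;
+///
+/// let ytdl = YoutubeDLOptions::new()
+///     .set("playliststart", 1)
+///     .set("noplaylist", false)
+///     .build()
+///     .expect("the embedded Python interpreter should start");
+/// ```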
+#[derive(Default, Debug)]
+pub struct YoutubeDLOptions {
+    options: serde_json::Map<String, serde_json::Value>,
+    progress_hook: Option<ProgressHookFunction>,
+    post_processors: Vec<PostProcessorFunction>,
+}
+
+impl YoutubeDLOptions {
+    #[must_use]
+    pub fn new() -> Self {
+        let me = Self {
+            options: serde_json::Map::new(),
+            progress_hook: None,
+            post_processors: vec![],
+        };
+
+        me.with_post_processor(post_processors::dearrow::process)
+    }
+
+    #[must_use]
+    pub fn set(self, key: impl Into<String>, value: impl Into<serde_json::Value>) -> Self {
+        let mut options = self.options;
+        options.insert(key.into(), value.into());
+
+        Self { options, ..self }
+    }
+
+    #[must_use]
+    pub fn with_progress_hook(self, progress_hook: ProgressHookFunction) -> Self {
+        if let Some(_previous_hook) = self.progress_hook {
+            todo!()
+        } else {
+            Self {
+                progress_hook: Some(progress_hook),
+                ..self
+            }
+        }
+    }
+
+    #[must_use]
+    pub fn with_post_processor(mut self, pp: PostProcessorFunction) -> Self {
+        self.post_processors.push(pp);
+        self
+    }
+
+    /// # Errors
+    /// If the underlying [`YoutubeDL::from_options`] errors.
+    pub fn build(self) -> Result<YoutubeDL, build::Error> {
+        YoutubeDL::from_options(self)
+    }
+
+    #[must_use]
+    pub fn from_json_options(options: serde_json::Map<String, serde_json::Value>) -> Self {
+        Self {
+            options,
+            ..Self::new()
+        }
+    }
+
+    #[must_use]
+    pub fn get(&self, key: &str) -> Option<&serde_json::Value> {
+        self.options.get(key)
+    }
+}
+
+impl YoutubeDL {
+    /// Construct this instance from options.
+    ///
+    /// # Panics
+    /// If `yt_dlp` changed their interface.
+    ///
+    /// # Errors
+    /// If a python call fails.
+    #[allow(clippy::too_many_lines)]
+    pub fn from_options(options: YoutubeDLOptions) -> Result<Self, build::Error> {
+        let mut settings = vm::Settings::default();
+        if let Ok(python_path) = env::var("PYTHONPATH") {
+            for path in python_path.split(':') {
+                settings.path_list.push(path.to_owned());
+            }
+        } else {
+            error!(
+                "No PYTHONPATH found or invalid utf8. \
+                This means, that you probably did not \
+                supply a yt_dlp python package!"
+            );
+        }
+
+        settings.install_signal_handlers = false;
+
+        // NOTE(@bpeetz): Another value leads to an internal codegen error. <2025-06-13>
+        settings.optimize = 0;
+
+        settings.isolated = true;
+
+        let interpreter = InterpreterConfig::new()
+            .init_stdlib()
+            .settings(settings)
+            .interpreter();
+
+        let output_options = options.options.clone();
+
+        let (yt_dlp_module, youtube_dl_class) = match interpreter.enter(|vm| {
+            {
+                // Add missing (and required) values to the stdlib
+                package_hacks::urllib3::apply_hacks(vm)?;
+            }
+
+            let yt_dlp_module = vm.import("yt_dlp", 0)?;
+            let class = yt_dlp_module.get_attr("YoutubeDL", vm)?;
+
+            let opts = json_loads(options.options, vm);
+
+            {
+                // Setup the progress hook
+                if let Some(function) = options.progress_hook {
+                    opts.get_or_insert(vm, vm.new_pyobj("progress_hooks"), || {
+                        let hook: PyObjectRef = vm.new_function("progress_hook", function).into();
+                        vm.new_pyobj(vec![hook])
+                    })
+                    .expect("Should work?");
+                }
+            }
+
+            {
+                // Unconditionally set a logger.
+                // Otherwise, yt_dlp will log to stderr.
+
+                /// Decide whether the specified record is to be logged
+                /// (the contract of `logging.Filter.filter`): return `false`
+                /// to drop the record and `true` to keep it.
+                fn filter_error_log(mut input: FuncArgs, vm: &VirtualMachine) -> bool {
+                    let record = input.args.remove(0);
+
+                    // Filter out all error logs (they are propagated as rust errors)
+                    let levelname: PyRef<PyStr> = record
+                        .get_attr("levelname", vm)
+                        .expect("This should exist")
+                        .downcast()
+                        .expect("This should be a String");
+
+                    let return_value = levelname.as_str() != "ERROR";
+
+                    if log_enabled!(Level::Debug) && !return_value {
+                        let message: String = {
+                            let get_message = record.get_attr("getMessage", vm).expect("Is set");
+                            let message: PyRef<PyStr> = get_message
+                                .call((), vm)
+                                .expect("Can be called")
+                                .downcast()
+                                .expect("Downcasting works");
+
+                            message.as_str().to_owned()
+                        };
+
+                        debug!("Swollowed error message: '{message}'");
+                    }
+                    return_value
+                }
+
+                let logging = setup_logging(vm, "yt_dlp")?;
+                let ytdl_logger = {
+                    let get_logger = logging.get_item("getLogger", vm)?;
+                    get_logger.call(("yt_dlp",), vm)?
+                };
+
+                {
+                    let args = FuncArgs::new(
+                        PosArgs::new(vec![]),
+                        KwArgs::new({
+                            let mut map = IndexMap::new();
+                            // Ensure that all events are logged by setting
+                            // the log level to NOTSET (we filter on rust's side)
+                            map.insert("level".to_owned(), vm.new_pyobj(0));
+                            map
+                        }),
+                    );
+
+                    let basic_config = logging.get_item("basicConfig", vm)?;
+                    basic_config.call(args, vm)?;
+                }
+
+                {
+                    let add_filter = ytdl_logger.get_attr("addFilter", vm)?;
+                    add_filter.call(
+                        (vm.new_function("yt_dlp_error_filter", filter_error_log),),
+                        vm,
+                    )?;
+                }
+
+                opts.set_item("logger", ytdl_logger, vm)?;
+            }
+
+            let youtube_dl_class = class.call((opts,), vm)?;
+
+            {
+                // Setup the post processors
+
+                let add_post_processor_fun = youtube_dl_class.get_attr("add_post_processor", vm)?;
+
+                for pp in options.post_processors {
+                    let args = {
+                        FuncArgs::new(
+                            PosArgs::new(vec![pp(vm)?]),
+                            KwArgs::new({
+                                let mut map = IndexMap::new();
+                                //  "when" can take any value in yt_dlp.utils.POSTPROCESS_WHEN
+                                map.insert("when".to_owned(), vm.new_pyobj("pre_process"));
+                                map
+                            }),
+                        )
+                    };
+
+                    add_post_processor_fun.call(args, vm)?;
+                }
+            }
+
+            Ok::<_, PyRef<PyBaseException>>((yt_dlp_module, youtube_dl_class))
+        }) {
+            Ok(ok) => Ok(ok),
+            Err(err) => {
+                // TODO(@bpeetz): Do we want to run `interpreter.finalize` here? <2025-06-14>
+                // interpreter.finalize(Some(err));
+                interpreter.enter(|vm| {
+                    let buffer = process_exception(vm, &err);
+                    Err(build::Error::Python(buffer))
+                })
+            }
+        }?;
+
+        Ok(Self {
+            interpreter,
+            youtube_dl_class,
+            yt_dlp_module,
+            options: output_options,
+        })
+    }
+}
+
+#[allow(missing_docs)]
+pub mod build {
+    #[derive(Debug, thiserror::Error)]
+    pub enum Error {
+        #[error("Python threw an exception: {0}")]
+        Python(String),
+    }
+}
diff --git a/crates/yt_dlp/src/package_hacks/mod.rs b/crates/yt_dlp/src/package_hacks/mod.rs
new file mode 100644
index 0000000..53fe323
--- /dev/null
+++ b/crates/yt_dlp/src/package_hacks/mod.rs
@@ -0,0 +1,11 @@
+// yt - A fully featured command line YouTube client
+//
+// Copyright (C) 2025 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+// SPDX-License-Identifier: GPL-3.0-or-later
+//
+// This file is part of Yt.
+//
+// You should have received a copy of the License along with this program.
+// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
+
+pub(super) mod urllib3;
diff --git a/crates/yt_dlp/src/package_hacks/urllib3.rs b/crates/yt_dlp/src/package_hacks/urllib3.rs
new file mode 100644
index 0000000..28ae37a
--- /dev/null
+++ b/crates/yt_dlp/src/package_hacks/urllib3.rs
@@ -0,0 +1,35 @@
+// yt - A fully featured command line YouTube client
+//
+// Copyright (C) 2025 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+// SPDX-License-Identifier: GPL-3.0-or-later
+//
+// This file is part of Yt.
+//
+// You should have received a copy of the License along with this program.
+// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
+
+use rustpython::vm::{PyResult, VirtualMachine};
+
+// NOTE(@bpeetz): Remove this, once rust-python supports these features. <2025-06-27>
+pub(crate) fn apply_hacks(vm: &VirtualMachine) -> PyResult<()> {
+    {
+        // Urllib3 tries to import this value, regardless of whether it is set.
+        let ssl_module = vm.import("ssl", 0)?;
+        ssl_module.set_attr("VERIFY_X509_STRICT", vm.ctx.new_int(0x20), vm)?;
+    }
+
+    {
+        // Urllib3 tries to set the SSLContext.verify_flags value, regardless of whether it exists.
+        // So we need to provide a polyfill.
+
+        let scope = vm.new_scope_with_builtins();
+
+        vm.run_code_string(
+            scope,
+            include_str!("urllib3_polyfill.py"),
+            "<embedded urllib3 polyfill workaround code>".to_owned(),
+        )?;
+    }
+
+    Ok(())
+}
diff --git a/crates/yt_dlp/.cargo/config.toml b/crates/yt_dlp/src/package_hacks/urllib3_polyfill.py
index d84f14d..610fd99 100644
--- a/crates/yt_dlp/.cargo/config.toml
+++ b/crates/yt_dlp/src/package_hacks/urllib3_polyfill.py
@@ -1,6 +1,6 @@
 # yt - A fully featured command line YouTube client
 #
-# Copyright (C) 2024 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+# Copyright (C) 2025 Benedikt Peetz <benedikt.peetz@b-peetz.de>
 # SPDX-License-Identifier: GPL-3.0-or-later
 #
 # This file is part of Yt.
@@ -8,5 +8,6 @@
 # You should have received a copy of the License along with this program.
 # If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
 
-[env]
-PYO3_PYTHON = "/nix/store/7xzk119acyws2c4ysygdv66l0grxkr39-python3-3.11.9-env/bin/python3"
+import ssl
+
+ssl.SSLContext.verify_flags = 0
diff --git a/crates/yt_dlp/src/post_processors/dearrow.rs b/crates/yt_dlp/src/post_processors/dearrow.rs
new file mode 100644
index 0000000..3cac745
--- /dev/null
+++ b/crates/yt_dlp/src/post_processors/dearrow.rs
@@ -0,0 +1,184 @@
+// yt - A fully featured command line YouTube client
+//
+// Copyright (C) 2025 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+// SPDX-License-Identifier: GPL-3.0-or-later
+//
+// This file is part of Yt.
+//
+// You should have received a copy of the License along with this program.
+// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
+
+use curl::easy::Easy;
+use log::{error, info, warn};
+use rustpython::vm::{
+    PyRef, VirtualMachine,
+    builtins::{PyDict, PyStr},
+};
+use serde::{Deserialize, Serialize};
+
+use crate::{pydict_cast, pydict_get, wrap_post_processor};
+
+wrap_post_processor!("DeArrow", unwrapped_process, process);
+
+/// # Errors
+/// If the API access fails.
+pub fn unwrapped_process(info: PyRef<PyDict>, vm: &VirtualMachine) -> Result<PyRef<PyDict>, Error> {
+    if pydict_get!(@vm, info, "extractor_key", PyStr).as_str() != "Youtube" {
+        warn!("DeArrow: Extractor did not match, exiting.");
+        return Ok(info);
+    }
+
+    let mut output: DeArrowApi = {
+        let output_bytes = {
+            let mut dst = Vec::new();
+
+            let mut easy = Easy::new();
+            easy.url(
+                format!(
+                    "https://sponsor.ajay.app/api/branding?videoID={}",
+                    pydict_get!(@vm, info, "id", PyStr).as_str()
+                )
+                .as_str(),
+            )?;
+
+            let mut transfer = easy.transfer();
+            transfer.write_function(|data| {
+                dst.extend_from_slice(data);
+                Ok(data.len())
+            })?;
+            transfer.perform()?;
+            drop(transfer);
+
+            dst
+        };
+
+        serde_json::from_slice(&output_bytes)?
+    };
+
+    // We pop the titles, so we need this vector reversed.
+    output.titles.reverse();
+
+    let title_len = output.titles.len();
+    let mut iterator = output.titles.clone();
+    let selected = loop {
+        let Some(title) = iterator.pop() else {
+            break false;
+        };
+
+        if (title.locked || title.votes < 1) && title_len > 1 {
+            info!(
+                "DeArrow: Skipping title {:#?}, as it is not good enough",
+                title.value
+            );
+            // Skip titles that are not “good” enough.
+            continue;
+        }
+
+        update_title(&info, &title.value, vm);
+
+        break true;
+    };
+
+    if !selected && title_len != 0 {
+        // No title was selected, even though we had some titles.
+        // Just pick the first one in this case.
+        update_title(&info, &output.titles[0].value, vm);
+    }
+
+    Ok(info)
+}
+
+#[derive(thiserror::Error, Debug)]
+pub enum Error {
+    #[error("Failed to access the DeArrow api: {0}")]
+    Get(#[from] curl::Error),
+
+    #[error("Failed to deserialize a api json return object: {0}")]
+    Deserialize(#[from] serde_json::Error),
+}
+
+fn update_title(info: &PyRef<PyDict>, new_title: &str, vm: &VirtualMachine) {
+    assert!(!info.contains_key("original_title", vm));
+
+    if let Ok(old_title) = info.get_item("title", vm) {
+        warn!(
+            "DeArrow: Updating title from {:#?} to {:#?}",
+            pydict_cast!(@ref old_title, PyStr).as_str(),
+            new_title
+        );
+
+        info.set_item("original_title", old_title, vm)
+            .expect("We checked, it is a new key");
+    } else {
+        warn!("DeArrow: Setting title to {new_title:#?}");
+    }
+
+    let cleaned_title = {
+        // NOTE(@bpeetz): DeArrow uses `>` as a “Don't format the next word” mark.
+        // It should be removed if one does not use an auto-formatter. <2025-06-16>
+        new_title.replace('>', "")
+    };
+
+    info.set_item("title", vm.new_pyobj(cleaned_title), vm)
+        .expect("This should work?");
+}
+
+#[derive(Serialize, Deserialize)]
+/// See: <https://wiki.sponsor.ajay.app/w/API_Docs/DeArrow>
+struct DeArrowApi {
+    titles: Vec<Title>,
+    thumbnails: Vec<Thumbnail>,
+
+    #[serde(alias = "randomTime")]
+    random_time: Option<f64>,
+
+    #[serde(alias = "videoDuration")]
+    video_duration: Option<f64>,
+
+    #[serde(alias = "casualVotes")]
+    casual_votes: Vec<CasualVote>,
+}
+
+#[derive(Serialize, Deserialize)]
+struct CasualVote {
+    id: String,
+    count: u32,
+    title: String,
+}
+
+#[derive(Serialize, Deserialize, Clone)]
+struct Title {
+    /// Note: Titles will sometimes contain > before a word.
+    /// This tells the auto-formatter to not format a word.
+    /// If you have no auto-formatter, you can ignore this and replace it with an empty string
+    #[serde(alias = "title")]
+    value: String,
+
+    original: bool,
+    votes: u64,
+    locked: bool,
+
+    #[serde(alias = "UUID")]
+    uuid: String,
+
+    /// only present if requested
+    #[serde(alias = "userID")]
+    user_id: Option<String>,
+}
+
+#[derive(Serialize, Deserialize)]
+struct Thumbnail {
+    // null if original is true
+    timestamp: Option<f64>,
+
+    original: bool,
+    votes: u64,
+    locked: bool,
+
+    #[serde(alias = "UUID")]
+    uuid: String,
+
+    /// only present if requested
+    #[serde(alias = "userID")]
+    user_id: Option<String>,
+}
diff --git a/crates/yt_dlp/src/post_processors/mod.rs b/crates/yt_dlp/src/post_processors/mod.rs
new file mode 100644
index 0000000..00b0ad5
--- /dev/null
+++ b/crates/yt_dlp/src/post_processors/mod.rs
@@ -0,0 +1,123 @@
+// yt - A fully featured command line YouTube client
+//
+// Copyright (C) 2025 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+// SPDX-License-Identifier: GPL-3.0-or-later
+//
+// This file is part of Yt.
+//
+// You should have received a copy of the License along with this program.
+// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
+
+pub mod dearrow;
+
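+/// Fetch `$name` from a python dict and downcast the value to `$into`,
+/// panicking with a readable message if the key is missing or the downcast fails.
+///
+/// Usage sketch, mirroring the call sites in `dearrow.rs`:
+/// `let id = pydict_get!(@vm, info, "id", PyStr);`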
+#[macro_export]
+macro_rules! pydict_get {
+    (@$vm:expr, $value:expr, $name:literal, $into:ident) => {{
+        match $value.get_item($name, $vm) {
+            Ok(val) => $crate::pydict_cast!(val, $into),
+            Err(_) => panic!(
+                concat!(
+                    "Expected '",
+                    $name,
+                    "' to be a key for the'",
+                    stringify!($value),
+                    "' py dictionary: {:#?}"
+                ),
+                $value
+            ),
+        }
+    }};
+}
+
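+/// Downcast a python object to `$into`, panicking with a readable message if
+/// the downcast fails. The `@ref` variant uses `downcast_ref` and borrows the
+/// value instead of consuming it.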
+#[macro_export]
+macro_rules! pydict_cast {
+    ($value:expr, $into:ident) => {{
+        match $value.downcast::<$into>() {
+            Ok(result) => result,
+            Err(val) => panic!(
+                concat!(
+                    "Expected to be able to downcast value ({:#?}) as ",
+                    stringify!($into)
+                ),
+                val
+            ),
+        }
+    }};
+    (@ref $value:expr, $into:ident) => {{
+        match $value.downcast_ref::<$into>() {
+            Some(result) => result,
+            None => panic!(
+                concat!(
+                    "Expected to be able to downcast value ({:#?}) as ",
+                    stringify!($into)
+                ),
+                $value
+            ),
+        }
+    }};
+}
+
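+/// Generate a function `$wrapped` that builds a Python
+/// `yt_dlp.postprocessor.PostProcessor` subclass named `$name`, whose `run`
+/// method delegates to the Rust function `$unwrap`.
+///
+/// Usage sketch, mirroring `dearrow.rs`:
+/// `wrap_post_processor!("DeArrow", unwrapped_process, process);`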
+#[macro_export]
+macro_rules! wrap_post_processor {
+    ($name:literal, $unwrap:ident, $wrapped:ident) => {
+        use $crate::progress_hook::__priv::vm;
+
+        /// # Errors
+        /// - If the underlying function returns an error.
+        /// - If python operations fail.
+        pub fn $wrapped(vm: &vm::VirtualMachine) -> vm::PyResult<vm::PyObjectRef> {
+            fn actual_processor(
+                mut input: vm::function::FuncArgs,
+                vm: &vm::VirtualMachine,
+            ) -> vm::PyResult<vm::PyRef<vm::builtins::PyDict>> {
+                let input = input
+                    .args
+                    .remove(0)
+                    .downcast::<vm::builtins::PyDict>()
+                    .expect("Should be a py dict");
+
+                let output = match $unwrap(input, vm) {
+                    Ok(ok) => ok,
+                    Err(err) => {
+                        return Err(vm.new_runtime_error(err.to_string()));
+                    }
+                };
+
+                Ok(output)
+            }
+
+            let scope = vm.new_scope_with_builtins();
+
+            scope.globals.set_item(
+                "actual_processor",
+                vm.new_function("actual_processor", actual_processor).into(),
+                vm,
+            )?;
+
+            let local_scope = scope.clone();
+            vm.run_code_string(
+                local_scope,
+                format!(
+                    "
+import yt_dlp
+
+class {}(yt_dlp.postprocessor.PostProcessor):
+    def run(self, info):
+        info = actual_processor(info)
+        return [], info
+
+inst = {}()
+",
+                    $name, $name
+                )
+                .as_str(),
+                "<embedded post processor initializing code>".to_owned(),
+            )?;
+
+            Ok(scope
+                .globals
+                .get_item("inst", vm)
+                .expect("We just declared it"))
+        }
+    };
+}
diff --git a/crates/yt_dlp/src/progress_hook.rs b/crates/yt_dlp/src/progress_hook.rs
new file mode 100644
index 0000000..b42ae21
--- /dev/null
+++ b/crates/yt_dlp/src/progress_hook.rs
@@ -0,0 +1,54 @@
+// yt - A fully featured command line YouTube client
+//
+// Copyright (C) 2025 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+// SPDX-License-Identifier: GPL-3.0-or-later
+//
+// This file is part of Yt.
+//
+// You should have received a copy of the License along with this program.
+// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
+
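+/// Wrap a plain Rust function into a progress hook callable from Python.
+///
+/// `$name` is expected to take the JSON-converted progress dictionary (as
+/// produced by `json_dumps`) and return a `Result`; the generated `$new_name`
+/// matches the signature expected by `YoutubeDLOptions::with_progress_hook`.
+///
+/// Usage sketch (`my_hook` is a hypothetical caller-defined function):
+/// `mk_python_function!(my_hook, wrapped_my_hook);`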
+#[macro_export]
+macro_rules! mk_python_function {
+    ($name:ident, $new_name:ident) => {
+        pub fn $new_name(
+            mut args: $crate::progress_hook::__priv::vm::function::FuncArgs,
+            vm: &$crate::progress_hook::__priv::vm::VirtualMachine,
+        ) {
+            use $crate::progress_hook::__priv::vm;
+
+            let input = {
+                let dict: vm::PyRef<vm::builtins::PyDict> = args
+                    .args
+                    .remove(0)
+                    .downcast()
+                    .expect("The progress hook is always called with these args");
+                let new_dict = vm::builtins::PyDict::new_ref(&vm.ctx);
+                dict.into_iter()
+                    .filter_map(|(name, value)| {
+                        let real_name: vm::PyRefExact<vm::builtins::PyStr> =
+                            name.downcast_exact(vm).expect("Is a string");
+                        let name_str = real_name.to_str().expect("Is a string");
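+                        // Keys starting with `_` (e.g. `_percent_str`) are
+                        // yt_dlp-internal display helpers; drop them before
+                        // converting the dict to JSON.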
+                        if name_str.starts_with('_') {
+                            None
+                        } else {
+                            Some((name_str.to_owned(), value))
+                        }
+                    })
+                    .for_each(|(key, value)| {
+                        new_dict
+                            .set_item(&key, value, vm)
+                            .expect("This is a transpositions, should always be valid");
+                    });
+
+                $crate::progress_hook::__priv::json_dumps(new_dict, vm)
+            };
+            $name(input).expect("Shall not fail!");
+        }
+    };
+}
+
+pub mod __priv {
+    pub use crate::info_json::{json_dumps, json_loads};
+    pub use rustpython::vm;
+}
diff --git a/crates/yt_dlp/src/python_error.rs b/crates/yt_dlp/src/python_error.rs
new file mode 100644
index 0000000..9513956
--- /dev/null
+++ b/crates/yt_dlp/src/python_error.rs
@@ -0,0 +1,116 @@
+// yt - A fully featured command line YouTube client
+//
+// Copyright (C) 2025 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+// SPDX-License-Identifier: GPL-3.0-or-later
+//
+// This file is part of Yt.
+//
+// You should have received a copy of the License along with this program.
+// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
+
+use std::fmt::Display;
+
+use log::{Level, debug, log_enabled};
+use rustpython::vm::{
+    AsObject, PyPayload, PyRef, VirtualMachine,
+    builtins::{PyBaseException, PyBaseExceptionRef, PyStr},
+    py_io::Write,
+    suggestion::offer_suggestions,
+};
+
+#[derive(thiserror::Error, Debug)]
+pub struct PythonError(pub String);
+
+impl Display for PythonError {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        write!(f, "Python threw an exception: {}", self.0)
+    }
+}
+
+impl PythonError {
+    pub(super) fn from_exception(vm: &VirtualMachine, exc: &PyRef<PyBaseException>) -> Self {
+        let buffer = process_exception(vm, exc);
+        Self(buffer)
+    }
+}
+
+pub(super) fn process_exception(vm: &VirtualMachine, err: &PyBaseExceptionRef) -> String {
+    let mut buffer = String::new();
+    write_exception(vm, &mut buffer, err)
+        .expect("We are writing into an *in-memory* string, it will always work");
+
+    if log_enabled!(Level::Debug) {
+        let mut output = String::new();
+        vm.write_exception(&mut output, err)
+            .expect("We are writing into an *in-memory* string, it will always work");
+        debug!("Python threw an exception: {output}");
+    }
+
+    buffer
+}
+
+// Inlined and changed from `vm.write_exception_inner`
+fn write_exception<W: Write>(
+    vm: &VirtualMachine,
+    output: &mut W,
+    exc: &PyBaseExceptionRef,
+) -> Result<(), W::Error> {
+    let varargs = exc.args();
+    let args_repr = {
+        match varargs.len() {
+            0 => vec![],
+            1 => {
+                let args0_repr = if true {
+                    varargs[0]
+                        .str(vm)
+                        .unwrap_or_else(|_| PyStr::from("<element str() failed>").into_ref(&vm.ctx))
+                } else {
+                    varargs[0].repr(vm).unwrap_or_else(|_| {
+                        PyStr::from("<element repr() failed>").into_ref(&vm.ctx)
+                    })
+                };
+                vec![args0_repr]
+            }
+            _ => varargs
+                .iter()
+                .map(|vararg| {
+                    vararg.repr(vm).unwrap_or_else(|_| {
+                        PyStr::from("<element repr() failed>").into_ref(&vm.ctx)
+                    })
+                })
+                .collect(),
+        }
+    };
+
+    let exc_class = exc.class();
+
+    if exc_class.fast_issubclass(vm.ctx.exceptions.syntax_error) {
+        unreachable!(
+            "A syntax error should never be raised, \
+            as yt_dlp should not have them and neither our embedded code"
+        );
+    }
+
+    let exc_name = exc_class.name();
+    match args_repr.len() {
+        0 => write!(output, "{exc_name}"),
+        1 => write!(output, "{}: {}", exc_name, args_repr[0]),
+        _ => write!(
+            output,
+            "{}: ({})",
+            exc_name,
+            args_repr
+                .iter()
+                .map(|val| val.as_str())
+                .collect::<Vec<_>>()
+                .join(", "),
+        ),
+    }?;
+
+    match offer_suggestions(exc, vm) {
+        Some(suggestions) => {
+            write!(output, ". Did you mean: '{suggestions}'?")
+        }
+        None => Ok(()),
+    }
+}
diff --git a/crates/yt_dlp/src/tests.rs b/crates/yt_dlp/src/tests.rs
deleted file mode 100644
index b48deb4..0000000
--- a/crates/yt_dlp/src/tests.rs
+++ /dev/null
@@ -1,85 +0,0 @@
-// yt - A fully featured command line YouTube client
-//
-// Copyright (C) 2024 Benedikt Peetz <benedikt.peetz@b-peetz.de>
-// SPDX-License-Identifier: GPL-3.0-or-later
-//
-// This file is part of Yt.
-//
-// You should have received a copy of the License along with this program.
-// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
-
-use std::sync::LazyLock;
-
-use serde_json::{json, Value};
-use url::Url;
-
-static YT_OPTS: LazyLock<serde_json::Map<String, Value>> = LazyLock::new(|| {
-    match json!({
-        "playliststart": 1,
-        "playlistend": 10,
-        "noplaylist": false,
-        "extract_flat": false,
-    }) {
-        Value::Object(obj) => obj,
-        _ => unreachable!("This json is hardcoded"),
-    }
-});
-
-#[tokio::test]
-async fn test_extract_info_video() {
-    let info = crate::extract_info(
-        &YT_OPTS,
-        &Url::parse("https://www.youtube.com/watch?v=dbjPnXaacAU").expect("Is valid."),
-        false,
-        false,
-    )
-    .await
-    .map_err(|err| format!("Encountered error: '{err}'"))
-    .unwrap();
-
-    println!("{info:#?}");
-}
-
-#[tokio::test]
-async fn test_extract_info_url() {
-    let err = crate::extract_info(
-        &YT_OPTS,
-        &Url::parse("https://google.com").expect("Is valid."),
-        false,
-        false,
-    )
-    .await
-    .map_err(|err| format!("Encountered error: '{err}'"))
-    .unwrap();
-
-    println!("{err:#?}");
-}
-
-#[tokio::test]
-async fn test_extract_info_playlist() {
-    let err = crate::extract_info(
-        &YT_OPTS,
-        &Url::parse("https://www.youtube.com/@TheGarriFrischer/videos").expect("Is valid."),
-        false,
-        true,
-    )
-    .await
-    .map_err(|err| format!("Encountered error: '{err}'"))
-    .unwrap();
-
-    println!("{err:#?}");
-}
-#[tokio::test]
-async fn test_extract_info_playlist_full() {
-    let err = crate::extract_info(
-        &YT_OPTS,
-        &Url::parse("https://www.youtube.com/@NixOS-Foundation/videos").expect("Is valid."),
-        false,
-        true,
-    )
-    .await
-    .map_err(|err| format!("Encountered error: '{err}'"))
-    .unwrap();
-
-    println!("{err:#?}");
-}
diff --git a/crates/yt_dlp/src/wrapper/info_json.rs b/crates/yt_dlp/src/wrapper/info_json.rs
deleted file mode 100644
index 35d155e..0000000
--- a/crates/yt_dlp/src/wrapper/info_json.rs
+++ /dev/null
@@ -1,556 +0,0 @@
-// yt - A fully featured command line YouTube client
-//
-// Copyright (C) 2024 Benedikt Peetz <benedikt.peetz@b-peetz.de>
-// SPDX-License-Identifier: GPL-3.0-or-later
-//
-// This file is part of Yt.
-//
-// You should have received a copy of the License along with this program.
-// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
-
-// `yt_dlp` named them like this.
-#![allow(clippy::pub_underscore_fields)]
-
-use std::{collections::HashMap, path::PathBuf};
-
-use pyo3::{types::PyDict, Bound, PyResult, Python};
-use serde::{Deserialize, Deserializer, Serialize};
-use serde_json::Value;
-use url::Url;
-
-use crate::json_loads_str;
-
-type Todo = String;
-type Extractor = String;
-type ExtractorKey = String;
-
-// TODO: Change this to map `_type` to a structure of values, instead of the options <2024-05-27>
-// And replace all the strings with better types (enums or urls)
-#[derive(Debug, Deserialize, Serialize, PartialEq)]
-#[serde(deny_unknown_fields)]
-pub struct InfoJson {
-    pub __files_to_move: Option<FilesToMove>,
-    pub __last_playlist_index: Option<u32>,
-    pub __post_extractor: Option<String>,
-    pub __x_forwarded_for_ip: Option<String>,
-    pub _filename: Option<PathBuf>,
-    pub _format_sort_fields: Option<Vec<String>>,
-    pub _has_drm: Option<Todo>,
-    pub _type: Option<InfoType>,
-    pub _version: Option<Version>,
-    pub abr: Option<f64>,
-    pub acodec: Option<String>,
-    pub age_limit: Option<u32>,
-    pub aspect_ratio: Option<f64>,
-    pub asr: Option<u32>,
-    pub audio_channels: Option<u32>,
-    pub audio_ext: Option<String>,
-    pub automatic_captions: Option<HashMap<String, Vec<Caption>>>,
-    pub availability: Option<String>,
-    pub average_rating: Option<String>,
-    pub categories: Option<Vec<String>>,
-    pub channel: Option<String>,
-    pub channel_follower_count: Option<u32>,
-    pub channel_id: Option<String>,
-    pub channel_is_verified: Option<bool>,
-    pub channel_url: Option<String>,
-    pub chapters: Option<Vec<Chapter>>,
-    pub comment_count: Option<u32>,
-    pub comments: Option<Vec<Comment>>,
-    pub concurrent_view_count: Option<u32>,
-    pub description: Option<String>,
-    pub display_id: Option<String>,
-    pub downloader_options: Option<DownloaderOptions>,
-    pub duration: Option<f64>,
-    pub duration_string: Option<String>,
-    pub dynamic_range: Option<String>,
-    pub entries: Option<Vec<InfoJson>>,
-    pub episode: Option<String>,
-    pub episode_number: Option<u32>,
-    pub epoch: Option<u32>,
-    pub ext: Option<String>,
-    pub extractor: Option<Extractor>,
-    pub extractor_key: Option<ExtractorKey>,
-    pub filename: Option<PathBuf>,
-    pub filesize: Option<u64>,
-    pub filesize_approx: Option<u64>,
-    pub format: Option<String>,
-    pub format_id: Option<String>,
-    pub format_index: Option<u32>,
-    pub format_note: Option<String>,
-    pub formats: Option<Vec<Format>>,
-    pub fps: Option<f64>,
-    pub fulltitle: Option<String>,
-    pub has_drm: Option<bool>,
-    pub heatmap: Option<Vec<HeatMapEntry>>,
-    pub height: Option<u32>,
-    pub http_headers: Option<HttpHeader>,
-    pub id: Option<String>,
-    pub ie_key: Option<ExtractorKey>,
-    pub is_live: Option<bool>,
-    pub language: Option<String>,
-    pub language_preference: Option<i32>,
-    pub license: Option<Todo>,
-    pub like_count: Option<u32>,
-    pub live_status: Option<String>,
-    pub location: Option<Todo>,
-    pub manifest_url: Option<Url>,
-    pub modified_date: Option<String>,
-    pub n_entries: Option<u32>,
-    pub original_url: Option<String>,
-    pub playable_in_embed: Option<bool>,
-    pub playlist: Option<Todo>,
-    pub playlist_autonumber: Option<u32>,
-    pub playlist_channel: Option<Todo>,
-    pub playlist_channel_id: Option<Todo>,
-    pub playlist_count: Option<u32>,
-    pub playlist_id: Option<Todo>,
-    pub playlist_index: Option<u64>,
-    pub playlist_title: Option<Todo>,
-    pub playlist_uploader: Option<Todo>,
-    pub playlist_uploader_id: Option<Todo>,
-    pub preference: Option<Todo>,
-    pub protocol: Option<String>,
-    pub quality: Option<f64>,
-    pub release_date: Option<String>,
-    pub release_timestamp: Option<u64>,
-    pub release_year: Option<u32>,
-    pub requested_downloads: Option<Vec<RequestedDownloads>>,
-    pub requested_entries: Option<Vec<u32>>,
-    pub requested_formats: Option<Vec<Format>>,
-    pub requested_subtitles: Option<HashMap<String, Subtitle>>,
-    pub resolution: Option<String>,
-    pub season: Option<String>,
-    pub season_number: Option<u32>,
-    pub series: Option<String>,
-    pub source_preference: Option<i32>,
-    pub sponsorblock_chapters: Option<Vec<SponsorblockChapter>>,
-    pub stretched_ratio: Option<Todo>,
-    pub subtitles: Option<HashMap<String, Vec<Caption>>>,
-    pub tags: Option<Vec<String>>,
-    pub tbr: Option<f64>,
-    pub thumbnail: Option<Url>,
-    pub thumbnails: Option<Vec<ThumbNail>>,
-    pub timestamp: Option<u64>,
-    pub title: Option<String>,
-    pub upload_date: Option<String>,
-    pub uploader: Option<String>,
-    pub uploader_id: Option<String>,
-    pub uploader_url: Option<String>,
-    pub url: Option<Url>,
-    pub vbr: Option<f64>,
-    pub vcodec: Option<String>,
-    pub video_ext: Option<String>,
-    pub view_count: Option<u32>,
-    pub was_live: Option<bool>,
-    pub webpage_url: Option<Url>,
-    pub webpage_url_basename: Option<String>,
-    pub webpage_url_domain: Option<String>,
-    pub width: Option<u32>,
-}
-
-#[derive(Debug, Deserialize, Serialize, PartialEq)]
-#[serde(deny_unknown_fields)]
-#[allow(missing_copy_implementations)]
-pub struct FilesToMove {}
-
-#[derive(Debug, Deserialize, Serialize, PartialEq)]
-#[serde(deny_unknown_fields)]
-pub struct RequestedDownloads {
-    pub __files_to_merge: Option<Vec<Todo>>,
-    pub __finaldir: PathBuf,
-    pub __infojson_filename: PathBuf,
-    pub __postprocessors: Vec<Todo>,
-    pub __real_download: bool,
-    pub __write_download_archive: bool,
-    pub _filename: PathBuf,
-    pub _type: InfoType,
-    pub _version: Version,
-    pub abr: f64,
-    pub acodec: String,
-    pub aspect_ratio: Option<f64>,
-    pub asr: Option<u32>,
-    pub audio_channels: Option<u32>,
-    pub audio_ext: Option<String>,
-    pub chapters: Option<Vec<SponsorblockChapter>>,
-    pub duration: Option<f64>,
-    pub dynamic_range: Option<String>,
-    pub ext: String,
-    pub filename: PathBuf,
-    pub filepath: PathBuf,
-    pub filesize_approx: Option<u64>,
-    pub format: String,
-    pub format_id: String,
-    pub format_note: String,
-    pub fps: Option<f64>,
-    pub has_drm: Option<bool>,
-    pub height: Option<u32>,
-    pub http_headers: Option<HttpHeader>,
-    pub infojson_filename: PathBuf,
-    pub language: Option<String>,
-    pub manifest_url: Option<Url>,
-    pub protocol: String,
-    pub requested_formats: Option<Vec<Format>>,
-    pub resolution: String,
-    pub tbr: f64,
-    pub url: Option<Url>,
-    pub vbr: f64,
-    pub vcodec: String,
-    pub video_ext: Option<String>,
-    pub width: Option<u32>,
-}
-
-#[derive(Debug, Deserialize, Serialize, PartialEq, PartialOrd)]
-#[serde(deny_unknown_fields)]
-pub struct Subtitle {
-    pub ext: SubtitleExt,
-    pub filepath: PathBuf,
-    pub filesize: Option<u64>,
-    pub fragment_base_url: Option<Url>,
-    pub fragments: Option<Vec<Fragment>>,
-    pub manifest_url: Option<Url>,
-    pub name: Option<String>,
-    pub protocol: Option<Todo>,
-    pub url: Url,
-}
-
-#[derive(Debug, Deserialize, Serialize, PartialEq, PartialOrd, Ord, Eq, Clone, Copy)]
-pub enum SubtitleExt {
-    #[serde(alias = "vtt")]
-    Vtt,
-
-    #[serde(alias = "mp4")]
-    Mp4,
-
-    #[serde(alias = "json")]
-    Json,
-    #[serde(alias = "json3")]
-    Json3,
-
-    #[serde(alias = "ttml")]
-    Ttml,
-
-    #[serde(alias = "srv1")]
-    Srv1,
-    #[serde(alias = "srv2")]
-    Srv2,
-    #[serde(alias = "srv3")]
-    Srv3,
-}
-
-#[derive(Debug, Deserialize, Serialize, PartialEq, PartialOrd)]
-#[serde(deny_unknown_fields)]
-pub struct Caption {
-    pub ext: SubtitleExt,
-    pub filepath: Option<PathBuf>,
-    pub filesize: Option<u64>,
-    pub fragments: Option<Vec<SubtitleFragment>>,
-    pub fragment_base_url: Option<Url>,
-    pub manifest_url: Option<Url>,
-    pub name: Option<String>,
-    pub protocol: Option<String>,
-    pub url: String,
-    pub video_id: Option<String>,
-}
-
-#[derive(Debug, Deserialize, Serialize, PartialEq, PartialOrd)]
-#[serde(deny_unknown_fields)]
-pub struct SubtitleFragment {
-    path: PathBuf,
-    duration: Option<f64>,
-}
-
-#[derive(Debug, Deserialize, Serialize, PartialEq, PartialOrd)]
-#[serde(deny_unknown_fields)]
-pub struct Chapter {
-    pub end_time: f64,
-    pub start_time: f64,
-    pub title: String,
-}
-
-#[derive(Debug, Deserialize, Serialize, PartialEq)]
-#[serde(deny_unknown_fields)]
-pub struct SponsorblockChapter {
-    /// This is an utterly useless field, and should thus be ignored
-    pub _categories: Option<Vec<Vec<Value>>>,
-
-    pub categories: Option<Vec<SponsorblockChapterCategory>>,
-    pub category: Option<SponsorblockChapterCategory>,
-    pub category_names: Option<Vec<String>>,
-    pub end_time: f64,
-    pub name: Option<String>,
-    pub r#type: Option<SponsorblockChapterType>,
-    pub start_time: f64,
-    pub title: String,
-}
-
-pub fn get_none<'de, D, T>(_: D) -> Result<Option<T>, D::Error>
-where
-    D: Deserializer<'de>,
-{
-    Ok(None)
-}
-
-#[derive(Debug, Deserialize, Serialize, PartialEq, PartialOrd, Ord, Eq, Clone, Copy)]
-#[serde(deny_unknown_fields)]
-pub enum SponsorblockChapterType {
-    #[serde(alias = "skip")]
-    Skip,
-
-    #[serde(alias = "chapter")]
-    Chapter,
-
-    #[serde(alias = "poi")]
-    Poi,
-}
-#[derive(Debug, Deserialize, Serialize, PartialEq, PartialOrd, Ord, Eq, Clone, Copy)]
-#[serde(deny_unknown_fields)]
-pub enum SponsorblockChapterCategory {
-    #[serde(alias = "filler")]
-    Filler,
-
-    #[serde(alias = "interaction")]
-    Interaction,
-
-    #[serde(alias = "music_offtopic")]
-    MusicOfftopic,
-
-    #[serde(alias = "poi_highlight")]
-    PoiHighlight,
-
-    #[serde(alias = "preview")]
-    Preview,
-
-    #[serde(alias = "sponsor")]
-    Sponsor,
-
-    #[serde(alias = "selfpromo")]
-    SelfPromo,
-
-    #[serde(alias = "chapter")]
-    Chapter,
-
-    #[serde(alias = "intro")]
-    Intro,
-
-    #[serde(alias = "outro")]
-    Outro,
-}
-
-#[derive(Debug, Deserialize, Serialize, PartialEq, PartialOrd)]
-#[serde(deny_unknown_fields)]
-#[allow(missing_copy_implementations)]
-pub struct HeatMapEntry {
-    pub start_time: f64,
-    pub end_time: f64,
-    pub value: f64,
-}
-
-#[derive(Debug, Deserialize, Serialize, PartialEq, PartialOrd, Ord, Eq, Clone, Copy)]
-#[serde(deny_unknown_fields)]
-pub enum InfoType {
-    #[serde(alias = "playlist")]
-    Playlist,
-
-    #[serde(alias = "url")]
-    Url,
-
-    #[serde(alias = "video")]
-    Video,
-}
-
-#[derive(Debug, Deserialize, Serialize, Eq, PartialEq, PartialOrd, Ord)]
-#[serde(deny_unknown_fields)]
-pub struct Version {
-    pub current_git_head: Option<String>,
-    pub release_git_head: String,
-    pub repository: String,
-    pub version: String,
-}
-
-#[derive(Debug, Deserialize, Serialize, Clone, Eq, PartialEq, PartialOrd, Ord)]
-#[serde(from = "String")]
-#[serde(deny_unknown_fields)]
-pub enum Parent {
-    Root,
-    Id(String),
-}
-
-impl Parent {
-    #[must_use]
-    pub fn id(&self) -> Option<&str> {
-        if let Self::Id(id) = self {
-            Some(id)
-        } else {
-            None
-        }
-    }
-}
-
-impl From<String> for Parent {
-    fn from(value: String) -> Self {
-        if value == "root" {
-            Self::Root
-        } else {
-            Self::Id(value)
-        }
-    }
-}
-
-#[derive(Debug, Deserialize, Serialize, Clone, Eq, PartialEq, PartialOrd, Ord)]
-#[serde(from = "String")]
-#[serde(deny_unknown_fields)]
-pub struct Id {
-    pub id: String,
-}
-impl From<String> for Id {
-    fn from(value: String) -> Self {
-        Self {
-            // Take the last element if the string is split with dots, otherwise take the full id
-            id: value.split('.').last().unwrap_or(&value).to_owned(),
-        }
-    }
-}
-
-#[derive(Debug, Deserialize, Serialize, Clone, Eq, PartialEq, PartialOrd, Ord)]
-#[serde(deny_unknown_fields)]
-#[allow(clippy::struct_excessive_bools)]
-pub struct Comment {
-    pub id: Id,
-    pub text: String,
-    #[serde(default = "zero")]
-    pub like_count: u32,
-    pub is_pinned: bool,
-    pub author_id: String,
-    #[serde(default = "unknown")]
-    pub author: String,
-    pub author_is_verified: bool,
-    pub author_thumbnail: Url,
-    pub parent: Parent,
-    #[serde(deserialize_with = "edited_from_time_text", alias = "_time_text")]
-    pub edited: bool,
-    // Can't also be deserialized, as it's already used in 'edited'
-    // _time_text: String,
-    pub timestamp: i64,
-    pub author_url: Url,
-    pub author_is_uploader: bool,
-    pub is_favorited: bool,
-}
-fn unknown() -> String {
-    "<Unknown>".to_string()
-}
-fn zero() -> u32 {
-    0
-}
-fn edited_from_time_text<'de, D>(d: D) -> Result<bool, D::Error>
-where
-    D: Deserializer<'de>,
-{
-    let s = String::deserialize(d)?;
-    if s.contains(" (edited)") {
-        Ok(true)
-    } else {
-        Ok(false)
-    }
-}
-
-#[derive(Debug, Deserialize, Serialize, PartialEq, Eq, PartialOrd, Ord)]
-#[serde(deny_unknown_fields)]
-pub struct ThumbNail {
-    pub id: Option<String>,
-    pub preference: Option<i32>,
-    /// in the form of "[`height`]x[`width`]"
-    pub resolution: Option<String>,
-    pub url: Url,
-    pub width: Option<u32>,
-    pub height: Option<u32>,
-}
-
-#[derive(Debug, Deserialize, Serialize, PartialEq, PartialOrd)]
-#[serde(deny_unknown_fields)]
-pub struct Format {
-    pub __needs_testing: Option<bool>,
-    pub __working: Option<bool>,
-    pub abr: Option<f64>,
-    pub acodec: Option<String>,
-    pub aspect_ratio: Option<f64>,
-    pub asr: Option<f64>,
-    pub audio_channels: Option<u32>,
-    pub audio_ext: Option<String>,
-    pub columns: Option<u32>,
-    pub container: Option<String>,
-    pub downloader_options: Option<DownloaderOptions>,
-    pub dynamic_range: Option<String>,
-    pub ext: String,
-    pub filepath: Option<PathBuf>,
-    pub filesize: Option<u64>,
-    pub filesize_approx: Option<u64>,
-    pub format: Option<String>,
-    pub format_id: String,
-    pub format_index: Option<String>,
-    pub format_note: Option<String>,
-    pub fps: Option<f64>,
-    pub fragment_base_url: Option<Todo>,
-    pub fragments: Option<Vec<Fragment>>,
-    pub has_drm: Option<bool>,
-    pub height: Option<u32>,
-    pub http_headers: Option<HttpHeader>,
-    pub is_dash_periods: Option<bool>,
-    pub language: Option<String>,
-    pub language_preference: Option<i32>,
-    pub manifest_stream_number: Option<u32>,
-    pub manifest_url: Option<Url>,
-    pub preference: Option<i32>,
-    pub protocol: Option<String>,
-    pub quality: Option<f64>,
-    pub resolution: Option<String>,
-    pub rows: Option<u32>,
-    pub source_preference: Option<i32>,
-    pub tbr: Option<f64>,
-    pub url: Url,
-    pub vbr: Option<f64>,
-    pub vcodec: String,
-    pub video_ext: Option<String>,
-    pub width: Option<u32>,
-}
-
-#[derive(Debug, Deserialize, Serialize, Eq, PartialEq, PartialOrd, Ord)]
-#[serde(deny_unknown_fields)]
-#[allow(missing_copy_implementations)]
-pub struct DownloaderOptions {
-    http_chunk_size: u64,
-}
-
-#[derive(Debug, Deserialize, Serialize, Eq, PartialEq, PartialOrd, Ord)]
-#[serde(deny_unknown_fields)]
-pub struct HttpHeader {
-    #[serde(alias = "User-Agent")]
-    pub user_agent: Option<String>,
-
-    #[serde(alias = "Accept")]
-    pub accept: Option<String>,
-
-    #[serde(alias = "X-Forwarded-For")]
-    pub x_forwarded_for: Option<String>,
-
-    #[serde(alias = "Accept-Language")]
-    pub accept_language: Option<String>,
-
-    #[serde(alias = "Sec-Fetch-Mode")]
-    pub sec_fetch_mode: Option<String>,
-}
-
-#[derive(Debug, Deserialize, Serialize, PartialEq, PartialOrd)]
-#[serde(deny_unknown_fields)]
-pub struct Fragment {
-    pub url: Option<Url>,
-    pub duration: Option<f64>,
-    pub path: Option<PathBuf>,
-}
-
-impl InfoJson {
-    pub fn to_py_dict(self, py: Python<'_>) -> PyResult<Bound<'_, PyDict>> {
-        let output: Bound<'_, PyDict> = json_loads_str(py, self)?;
-        Ok(output)
-    }
-}
diff --git a/crates/yt_dlp/src/wrapper/yt_dlp_options.rs b/crates/yt_dlp/src/wrapper/yt_dlp_options.rs
deleted file mode 100644
index c2a86df..0000000
--- a/crates/yt_dlp/src/wrapper/yt_dlp_options.rs
+++ /dev/null
@@ -1,62 +0,0 @@
-// yt - A fully featured command line YouTube client
-//
-// Copyright (C) 2024 Benedikt Peetz <benedikt.peetz@b-peetz.de>
-// SPDX-License-Identifier: GPL-3.0-or-later
-//
-// This file is part of Yt.
-//
-// You should have received a copy of the License along with this program.
-// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
-
-use pyo3::{types::PyDict, Bound, PyResult, Python};
-use serde::Serialize;
-
-use crate::json_loads;
-
-#[derive(Serialize, Clone)]
-pub struct YtDlpOptions {
-    pub playliststart: u32,
-    pub playlistend: u32,
-    pub noplaylist: bool,
-    pub extract_flat: ExtractFlat,
-    // pub extractor_args: ExtractorArgs,
-    // pub format: String,
-    // pub fragment_retries: u32,
-    // #[serde(rename(serialize = "getcomments"))]
-    // pub get_comments: bool,
-    // #[serde(rename(serialize = "ignoreerrors"))]
-    // pub ignore_errors: bool,
-    // pub retries: u32,
-    // #[serde(rename(serialize = "writeinfojson"))]
-    // pub write_info_json: bool,
-    // pub postprocessors: Vec<serde_json::Map<String, serde_json::Value>>,
-}
-
-#[derive(Serialize, Copy, Clone)]
-pub enum ExtractFlat {
-    #[serde(rename(serialize = "in_playlist"))]
-    InPlaylist,
-
-    #[serde(rename(serialize = "discard_in_playlist"))]
-    DiscardInPlaylist,
-}
-
-#[derive(Serialize, Clone)]
-pub struct ExtractorArgs {
-    pub youtube: YoutubeExtractorArgs,
-}
-
-#[derive(Serialize, Clone)]
-pub struct YoutubeExtractorArgs {
-    comment_sort: Vec<String>,
-    max_comments: Vec<String>,
-}
-
-impl YtDlpOptions {
-    pub fn to_py_dict(self, py: Python) -> PyResult<Bound<PyDict>> {
-        let string = serde_json::to_string(&self).expect("This should always work");
-
-        let output: Bound<PyDict> = json_loads(py, string)?;
-        Ok(output)
-    }
-}