about summary refs log tree commit diff stats
diff options
context:
space:
mode:
-rw-r--r--Cargo.toml4
-rw-r--r--crates/yt_dlp/.cargo/config.toml12
-rw-r--r--crates/yt_dlp/Cargo.toml13
-rw-r--r--crates/yt_dlp/src/duration.rs78
-rw-r--r--crates/yt_dlp/src/error.rs68
-rw-r--r--crates/yt_dlp/src/lib.rs954
-rw-r--r--crates/yt_dlp/src/logging.rs148
-rw-r--r--crates/yt_dlp/src/progress_hook.rs41
-rw-r--r--crates/yt_dlp/src/python_json_decode_failed.error_msg5
-rw-r--r--crates/yt_dlp/src/python_json_decode_failed.error_msg.license9
-rw-r--r--crates/yt_dlp/src/tests.rs89
-rw-r--r--crates/yt_dlp/src/wrapper/info_json.rs827
-rw-r--r--crates/yt_dlp/src/wrapper/mod.rs12
-rw-r--r--crates/yt_dlp/src/wrapper/yt_dlp_options.rs62
-rw-r--r--yt/src/cli.rs12
-rw-r--r--yt/src/comments/comment.rs89
-rw-r--r--yt/src/comments/description.rs8
-rw-r--r--yt/src/comments/mod.rs26
-rw-r--r--yt/src/download/download_options.rs197
-rw-r--r--yt/src/download/mod.rs27
-rw-r--r--yt/src/download/progress_hook.rs190
-rw-r--r--yt/src/main.rs20
-rw-r--r--yt/src/select/cmds/add.rs104
-rw-r--r--yt/src/storage/subscriptions.rs25
-rw-r--r--yt/src/storage/video_database/get/mod.rs2
-rw-r--r--yt/src/subscribe/mod.rs27
-rw-r--r--yt/src/update/mod.rs69
-rw-r--r--yt/src/update/updater.rs117
28 files changed, 1216 insertions, 2019 deletions
diff --git a/Cargo.toml b/Cargo.toml
index 403c8f1..222ef7e 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -59,6 +59,10 @@ codegen-units = 1
 panic = "abort"
 split-debuginfo = "off"
 
+[profile.dev]
+# Otherwise, yt_dlp is just too slow
+opt-level = 2
+
 [workspace.lints.rust]
 # rustc lint groups https://doc.rust-lang.org/rustc/lints/groups.html
 warnings = "warn"
diff --git a/crates/yt_dlp/.cargo/config.toml b/crates/yt_dlp/.cargo/config.toml
deleted file mode 100644
index d84f14d..0000000
--- a/crates/yt_dlp/.cargo/config.toml
+++ /dev/null
@@ -1,12 +0,0 @@
-# yt - A fully featured command line YouTube client
-#
-# Copyright (C) 2024 Benedikt Peetz <benedikt.peetz@b-peetz.de>
-# SPDX-License-Identifier: GPL-3.0-or-later
-#
-# This file is part of Yt.
-#
-# You should have received a copy of the License along with this program.
-# If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
-
-[env]
-PYO3_PYTHON = "/nix/store/7xzk119acyws2c4ysygdv66l0grxkr39-python3-3.11.9-env/bin/python3"
diff --git a/crates/yt_dlp/Cargo.toml b/crates/yt_dlp/Cargo.toml
index b80c70f..ddd5f9b 100644
--- a/crates/yt_dlp/Cargo.toml
+++ b/crates/yt_dlp/Cargo.toml
@@ -10,7 +10,7 @@
 
 [package]
 name = "yt_dlp"
-description = "A wrapper around the python yt_dlp library"
+description = "A Rust FFI wrapper library for the Python yt_dlp library"
 keywords = []
 categories = []
 version.workspace = true
@@ -19,19 +19,16 @@ authors.workspace = true
 license.workspace = true
 repository.workspace = true
 rust-version.workspace = true
-publish = false
+publish = true
 
 [dependencies]
-pyo3 = { version = "0.24.0", features = ["auto-initialize"] }
-bytes.workspace = true
+indexmap = { version = "2.9.0", default-features = false }
 log.workspace = true
-serde.workspace = true
+rustpython = { git = "https://github.com/RustPython/RustPython.git", features = ["threading", "stdlib", "stdio", "importlib", "ssl"], default-features = false }
 serde_json.workspace = true
+thiserror = "2.0.12"
 url.workspace = true
 
-[dev-dependencies]
-tokio.workspace = true
-
 [lints]
 workspace = true
 
diff --git a/crates/yt_dlp/src/duration.rs b/crates/yt_dlp/src/duration.rs
deleted file mode 100644
index 19181a5..0000000
--- a/crates/yt_dlp/src/duration.rs
+++ /dev/null
@@ -1,78 +0,0 @@
-// yt - A fully featured command line YouTube client
-//
-// Copyright (C) 2024 Benedikt Peetz <benedikt.peetz@b-peetz.de>
-// SPDX-License-Identifier: GPL-3.0-or-later
-//
-// This file is part of Yt.
-//
-// You should have received a copy of the License along with this program.
-// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
-
-// TODO: This file should be de-duplicated with the same file in the 'yt' crate <2024-06-25>
-
-#[derive(Debug, Clone, Copy)]
-pub struct Duration {
-    time: u32,
-}
-
-impl From<&str> for Duration {
-    fn from(v: &str) -> Self {
-        let buf: Vec<_> = v.split(':').take(2).collect();
-        Self {
-            time: (buf[0].parse::<u32>().expect("Should be a number") * 60)
-                + buf[1].parse::<u32>().expect("Should be a number"),
-        }
-    }
-}
-
-impl From<Option<f64>> for Duration {
-    fn from(value: Option<f64>) -> Self {
-        Self {
-            #[allow(
-                clippy::cast_possible_truncation,
-                clippy::cast_precision_loss,
-                clippy::cast_sign_loss
-            )]
-            time: value.unwrap_or(0.0).ceil() as u32,
-        }
-    }
-}
-
-impl std::fmt::Display for Duration {
-    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> Result<(), std::fmt::Error> {
-        const SECOND: u32 = 1;
-        const MINUTE: u32 = 60 * SECOND;
-        const HOUR: u32 = 60 * MINUTE;
-
-        let base_hour = self.time - (self.time % HOUR);
-        let base_min = (self.time % HOUR) - ((self.time % HOUR) % MINUTE);
-        let base_sec = (self.time % HOUR) % MINUTE;
-
-        let h = base_hour / HOUR;
-        let m = base_min / MINUTE;
-        let s = base_sec / SECOND;
-
-        if self.time == 0 {
-            write!(f, "0s")
-        } else if h > 0 {
-            write!(f, "{h}h {m}m")
-        } else {
-            write!(f, "{m}m {s}s")
-        }
-    }
-}
-#[cfg(test)]
-mod test {
-    use super::Duration;
-
-    #[test]
-    fn test_display_duration_1h() {
-        let dur = Duration { time: 60 * 60 };
-        assert_eq!("1h 0m".to_owned(), dur.to_string());
-    }
-    #[test]
-    fn test_display_duration_30min() {
-        let dur = Duration { time: 60 * 30 };
-        assert_eq!("30m 0s".to_owned(), dur.to_string());
-    }
-}
diff --git a/crates/yt_dlp/src/error.rs b/crates/yt_dlp/src/error.rs
deleted file mode 100644
index 3881f0b..0000000
--- a/crates/yt_dlp/src/error.rs
+++ /dev/null
@@ -1,68 +0,0 @@
-// yt - A fully featured command line YouTube client
-//
-// Copyright (C) 2025 Benedikt Peetz <benedikt.peetz@b-peetz.de>
-// SPDX-License-Identifier: GPL-3.0-or-later
-//
-// This file is part of Yt.
-//
-// You should have received a copy of the License along with this program.
-// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
-
-use std::{fmt::Display, io};
-
-use pyo3::Python;
-
-#[derive(Debug)]
-#[allow(clippy::module_name_repetitions)]
-pub enum YtDlpError {
-    ResponseParseError {
-        error: serde_json::error::Error,
-    },
-    PythonError {
-        error: Box<pyo3::PyErr>,
-        kind: String,
-    },
-    IoError {
-        error: io::Error,
-    },
-}
-
-impl std::error::Error for YtDlpError {}
-
-impl Display for YtDlpError {
-    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-        match self {
-            YtDlpError::ResponseParseError { error } => write!(
-                f,
-                include_str!("./python_json_decode_failed.error_msg"),
-                error
-            ),
-            YtDlpError::PythonError { error, kind: _ } => write!(f, "Python error: {error}"),
-            YtDlpError::IoError { error } => write!(f, "Io error: {error}"),
-        }
-    }
-}
-
-impl From<serde_json::error::Error> for YtDlpError {
-    fn from(value: serde_json::error::Error) -> Self {
-        Self::ResponseParseError { error: value }
-    }
-}
-
-impl From<pyo3::PyErr> for YtDlpError {
-    fn from(value: pyo3::PyErr) -> Self {
-        Python::with_gil(|py| {
-            let kind = value.get_type(py).to_string();
-            Self::PythonError {
-                error: Box::new(value),
-                kind,
-            }
-        })
-    }
-}
-
-impl From<io::Error> for YtDlpError {
-    fn from(value: io::Error) -> Self {
-        Self::IoError { error: value }
-    }
-}
diff --git a/crates/yt_dlp/src/lib.rs b/crates/yt_dlp/src/lib.rs
index c6d9290..34b8a5d 100644
--- a/crates/yt_dlp/src/lib.rs
+++ b/crates/yt_dlp/src/lib.rs
@@ -1,549 +1,541 @@
-// yt - A fully featured command line YouTube client
-//
-// Copyright (C) 2024 Benedikt Peetz <benedikt.peetz@b-peetz.de>
-// SPDX-License-Identifier: GPL-3.0-or-later
-//
-// This file is part of Yt.
-//
-// You should have received a copy of the License along with this program.
-// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
-
-// The pyo3 `pyfunction` proc-macros call unsafe functions internally, which trigger this lint.
-#![allow(unsafe_op_in_unsafe_fn)]
-#![allow(clippy::missing_errors_doc)]
-
-use std::io::stderr;
-use std::{env, process};
-use std::{fs::File, io::Write};
-
-use std::{path::PathBuf, sync::Once};
-
-use crate::{duration::Duration, logging::setup_logging, wrapper::info_json::InfoJson};
-
-use bytes::Bytes;
-use error::YtDlpError;
-use log::{Level, debug, info, log_enabled};
-use pyo3::types::{PyString, PyTuple, PyTupleMethods};
-use pyo3::{
-    Bound, PyAny, PyResult, Python, pyfunction,
-    types::{PyAnyMethods, PyDict, PyDictMethods, PyList, PyListMethods, PyModule},
-    wrap_pyfunction,
+//! The `yt_dlp` interface is completely contained in the [`YoutubeDL`] structure.
+
+use std::io::Write;
+use std::mem;
+use std::{env, fs::File, path::PathBuf};
+
+use indexmap::IndexMap;
+use log::{Level, debug, error, info, log_enabled};
+use logging::setup_logging;
+use rustpython::vm::builtins::PyList;
+use rustpython::{
+    InterpreterConfig,
+    vm::{
+        self, Interpreter, PyObjectRef, PyRef, VirtualMachine,
+        builtins::{PyBaseException, PyDict, PyStr},
+        function::{FuncArgs, KwArgs, PosArgs},
+    },
 };
-use serde::Serialize;
-use serde_json::{Map, Value};
 use url::Url;
 
-pub mod duration;
-pub mod error;
-pub mod logging;
-pub mod wrapper;
+mod logging;
+pub mod progress_hook;
 
-#[cfg(test)]
-mod tests;
-
-/// Synchronisation helper, to ensure that we don't setup the logger multiple times
-static SYNC_OBJ: Once = Once::new();
+#[macro_export]
+macro_rules! json_get {
+    ($value:expr, $name:literal, $into:ident) => {
+        $crate::json_cast!($value.get($name).expect("Should exist"), $into)
+    };
+}
 
-/// Add a logger to the yt-dlp options.
-/// If you have an logger set (i.e. for rust), than this will log to rust
-///
-/// # Panics
-/// This should never panic.
-pub fn add_logger_and_sig_handler<'a>(
-    opts: Bound<'a, PyDict>,
-    py: Python<'_>,
-) -> PyResult<Bound<'a, PyDict>> {
-    /// Is the specified record to be logged? Returns false for no,
-    /// true for yes. Filters can either modify log records in-place or
-    /// return a completely different record instance which will replace
-    /// the original log record in any future processing of the event.
-    #[pyfunction]
-    fn filter_error_log(_py: Python<'_>, record: &Bound<'_, PyAny>) -> bool {
-        // Filter out all error logs (they are propagated as rust errors)
-        let levelname: String = record
-            .getattr("levelname")
-            .expect("This should exist")
-            .extract()
-            .expect("This should be a String");
-
-        let return_value = levelname.as_str() != "ERROR";
-
-        if log_enabled!(Level::Debug) && !return_value {
-            let message: String = record
-                .call_method0("getMessage")
-                .expect("This method exists")
-                .extract()
-                .expect("The message is a string");
-
-            debug!("Swollowed error message: '{message}'");
-        }
-        return_value
-    }
+#[macro_export]
+macro_rules! json_cast {
+    ($value:expr, $into:ident) => {
+        $value.$into().expect(concat!(
+            "Should be able to cast value into ",
+            stringify!($into)
+        ))
+    };
+}
 
-    setup_logging(py, "yt_dlp")?;
-
-    let logging = PyModule::import(py, "logging")?;
-    let ytdl_logger = logging.call_method1("getLogger", ("yt_dlp",))?;
-
-    // Ensure that all events are logged by setting the log level to NOTSET (we filter on rust's side)
-    // Also use this static, to ensure that we don't configure the logger every time
-    SYNC_OBJ.call_once(|| {
-        // Disable the SIGINT (Ctrl+C) handler, python installs.
-        // This allows the user to actually stop the application with Ctrl+C.
-        // This is here because it can only be run in the main thread and this was here already.
-        py.run(
-            c"\
-import signal
-signal.signal(signal.SIGINT, signal.SIG_DFL)",
-            None,
-            None,
-        )
-        .expect("This code should always work");
-
-        let config_opts = PyDict::new(py);
-        config_opts
-            .set_item("level", 0)
-            .expect("Setting this item should always work");
-
-        logging
-            .call_method("basicConfig", (), Some(&config_opts))
-            .expect("This method exists");
-    });
-
-    ytdl_logger.call_method1(
-        "addFilter",
-        (wrap_pyfunction!(filter_error_log, py).expect("This function can be wrapped"),),
-    )?;
-
-    // This was taken from `ytcc`, I don't think it is still applicable
-    // ytdl_logger.setattr("propagate", false)?;
-    // let logging_null_handler = logging.call_method0("NullHandler")?;
-    // ytdl_logger.setattr("addHandler", logging_null_handler)?;
-
-    opts.set_item("logger", ytdl_logger).expect("Should work");
-
-    Ok(opts)
+/// The core of the `yt_dlp` interface.
+pub struct YoutubeDL {
+    interpreter: Interpreter,
+    youtube_dl_class: PyObjectRef,
+    yt_dlp_module: PyObjectRef,
+    options: serde_json::Map<String, serde_json::Value>,
 }
 
-#[pyfunction]
-#[allow(clippy::too_many_lines)]
-#[allow(clippy::missing_panics_doc)]
-#[allow(clippy::items_after_statements)]
-#[allow(
-    clippy::cast_possible_truncation,
-    clippy::cast_sign_loss,
-    clippy::cast_precision_loss
-)]
-pub fn progress_hook(py: Python<'_>, input: &Bound<'_, PyDict>) -> PyResult<()> {
-    // Only add the handler, if the log-level is higher than Debug (this avoids covering debug
-    // messages).
-    if log_enabled!(Level::Debug) {
-        return Ok(());
+impl std::fmt::Debug for YoutubeDL {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        // TODO(@bpeetz): Use something useful here. <2025-06-13>
+        f.write_str("YoutubeDL")
     }
+}
 
-    // ANSI ESCAPE CODES Wrappers {{{
-    // see: https://en.wikipedia.org/wiki/ANSI_escape_code#Control_Sequence_Introducer_commands
-    const CSI: &str = "\x1b[";
-    fn clear_whole_line() {
-        eprint!("{CSI}2K");
-    }
-    fn move_to_col(x: usize) {
-        eprint!("{CSI}{x}G");
-    }
-    // }}}
-
-    let input: Map<String, Value> = serde_json::from_str(&json_dumps(
-        py,
-        input
-            .downcast::<PyAny>()
-            .expect("Will always work")
-            .to_owned(),
-    )?)
-    .expect("python's json is valid");
-
-    macro_rules! get {
-        (@interrogate $item:ident, $type_fun:ident, $get_fun:ident, $name:expr) => {{
-            let a = $item.get($name).expect(concat!(
-                "The field '",
-                stringify!($name),
-                "' should exist."
-            ));
-
-            if a.$type_fun() {
-                a.$get_fun().expect(
-                    "The should have been checked in the if guard, so unpacking here is fine",
-                )
-            } else {
-                panic!(
-                    "Value {} => \n{}\n is not of type: {}",
-                    $name,
-                    a,
-                    stringify!($type_fun)
-                );
+impl YoutubeDL {
+    /// Construct this instance from options.
+    ///
+    /// # Panics
+    /// If `yt_dlp` changed their interface.
+    ///
+    /// # Errors
+    /// If a python call fails.
+    pub fn from_options(mut options: YoutubeDLOptions) -> Result<Self, build::Error> {
+        let mut settings = vm::Settings::default();
+        if let Ok(python_path) = env::var("PYTHONPATH") {
+            for path in python_path.split(':') {
+                settings.path_list.push(path.to_owned());
             }
-        }};
+        } else {
+            error!(
+                "No PYTHONPATH found or invalid utf8. \
+                This means that you probably did not \
+                supply the yt_dlp!"
+            );
+        }
 
-        ($type_fun:ident, $get_fun:ident, $name1:expr, $name2:expr) => {{
-            let a = get! {@interrogate input, is_object, as_object, $name1};
-            let b = get! {@interrogate a, $type_fun, $get_fun, $name2};
-            b
-        }};
+        settings.install_signal_handlers = false;
 
-        ($type_fun:ident, $get_fun:ident, $name:expr) => {{
-            get! {@interrogate input, $type_fun, $get_fun, $name}
-        }};
-    }
+        // NOTE(@bpeetz): Another value leads to an internal codegen error. <2025-06-13>
+        settings.optimize = 0;
 
-    macro_rules! default_get {
-        (@interrogate $item:ident, $default:expr, $get_fun:ident, $name:expr) => {{
-            let a = if let Some(field) = $item.get($name) {
-                field.$get_fun().unwrap_or($default)
-            } else {
-                $default
-            };
-            a
-        }};
-
-        ($get_fun:ident, $default:expr, $name1:expr, $name2:expr) => {{
-            let a = get! {@interrogate input, is_object, as_object, $name1};
-            let b = default_get! {@interrogate a, $default, $get_fun, $name2};
-            b
-        }};
-
-        ($get_fun:ident, $default:expr, $name:expr) => {{
-            default_get! {@interrogate input, $default, $get_fun, $name}
-        }};
-    }
+        settings.isolated = true;
 
-    macro_rules! c {
-        ($color:expr, $format:expr) => {
-            format!("\x1b[{}m{}\x1b[0m", $color, $format)
-        };
-    }
+        let interpreter = InterpreterConfig::new()
+            .init_stdlib()
+            .settings(settings)
+            .interpreter();
 
-    fn format_bytes(bytes: u64) -> String {
-        let bytes = Bytes::new(bytes);
-        bytes.to_string()
-    }
+        let output_options = options.options.clone();
 
-    fn format_speed(speed: f64) -> String {
-        #[allow(clippy::cast_possible_truncation, clippy::cast_sign_loss)]
-        let bytes = Bytes::new(speed.floor() as u64);
-        format!("{bytes}/s")
-    }
+        let (yt_dlp_module, youtube_dl_class) = match interpreter.enter(|vm| {
+            let yt_dlp_module = vm.import("yt_dlp", 0)?;
+            let class = yt_dlp_module.get_attr("YoutubeDL", vm)?;
 
-    let get_title = || -> String {
-        match get! {is_string, as_str, "info_dict", "ext"} {
-            "vtt" => {
-                format!(
-                    "Subtitles ({})",
-                    default_get! {as_str, "<No Subtitle Language>", "info_dict", "name"}
-                )
+            let maybe_hook = mem::take(&mut options.progress_hook);
+            let opts = options.into_py_dict(vm);
+            if let Some(function) = maybe_hook {
+                opts.get_or_insert(vm, vm.new_pyobj("progress_hooks"), || {
+                    let hook: PyObjectRef = vm.new_function("progress_hook", function).into();
+                    vm.new_pyobj(vec![hook])
+                })
+                .expect("Should work?");
             }
-            "webm" | "mp4" | "mp3" | "m4a" => {
-                default_get! { as_str, "<No title>", "info_dict", "title"}.to_owned()
-            }
-            other => panic!("The extension '{other}' is not yet implemented"),
-        }
-    };
 
-    match get! {is_string, as_str, "status"} {
-        "downloading" => {
-            let elapsed = default_get! {as_f64, 0.0f64, "elapsed"};
-            let eta = default_get! {as_f64, 0.0, "eta"};
-            let speed = default_get! {as_f64, 0.0, "speed"};
-
-            let downloaded_bytes = get! {is_u64, as_u64, "downloaded_bytes"};
-            let (total_bytes, bytes_is_estimate): (u64, &'static str) = {
-                let total_bytes = default_get!(as_u64, 0, "total_bytes");
-                if total_bytes == 0 {
-                    let maybe_estimate = default_get!(as_u64, 0, "total_bytes_estimate");
-
-                    if maybe_estimate == 0 {
-                        // The download speed should be in bytes per second and the eta in seconds.
-                        // Thus multiplying them gets us the raw bytes (which were estimated by `yt_dlp`, from their `info.json`)
-                        let bytes_still_needed = (speed * eta).ceil() as u64;
-
-                        (downloaded_bytes + bytes_still_needed, "~")
-                    } else {
-                        (maybe_estimate, "~")
+            {
+                // Unconditionally set a logger.
+                // Otherwise, yt_dlp will log to stderr.
+
+                /// Is the specified record to be logged? Returns false for no,
+                /// true for yes. Filters can either modify log records in-place or
+                /// return a completely different record instance which will replace
+                /// the original log record in any future processing of the event.
+                fn filter_error_log(mut input: FuncArgs, vm: &VirtualMachine) -> bool {
+                    let record = input.args.remove(0);
+
+                    // Filter out all error logs (they are propagated as rust errors)
+                    let levelname: PyRef<PyStr> = record
+                        .get_attr("levelname", vm)
+                        .expect("This should exist")
+                        .downcast()
+                        .expect("This should be a String");
+
+                    let return_value = levelname.as_str() != "ERROR";
+
+                    if log_enabled!(Level::Debug) && !return_value {
+                        let message: String = {
+                            let get_message = record.get_attr("getMessage", vm).expect("Is set");
+                            let message: PyRef<PyStr> = get_message
+                                .call((), vm)
+                                .expect("Can be called")
+                                .downcast()
+                                .expect("Downcasting works");
+
+                            message.as_str().to_owned()
+                        };
+
+                        debug!("Swallowed error message: '{message}'");
                     }
-                } else {
-                    (total_bytes, "")
+                    return_value
                 }
-            };
-            let percent: f64 = {
-                if total_bytes == 0 {
-                    100.0
-                } else {
-                    (downloaded_bytes as f64 / total_bytes as f64) * 100.0
+
+                let logging = setup_logging(vm, "yt_dlp")?;
+                let ytdl_logger = {
+                    let get_logger = logging.get_item("getLogger", vm)?;
+                    get_logger.call(("yt_dlp",), vm)?
+                };
+
+                {
+                    let args = FuncArgs::new(
+                        PosArgs::new(vec![]),
+                        KwArgs::new({
+                            let mut map = IndexMap::new();
+                            // Ensure that all events are logged by setting
+                            // the log level to NOTSET (we filter on rust's side)
+                            map.insert("level".to_owned(), vm.new_pyobj(0));
+                            map
+                        }),
+                    );
+
+                    let basic_config = logging.get_item("basicConfig", vm)?;
+                    basic_config.call(args, vm)?;
                 }
-            };
 
-            clear_whole_line();
-            move_to_col(1);
-
-            eprint!(
-                "'{}' [{}/{} at {}] -> [{} of {}{} {}] ",
-                c!("34;1", get_title()),
-                c!("33;1", Duration::from(Some(elapsed))),
-                c!("33;1", Duration::from(Some(eta))),
-                c!("32;1", format_speed(speed)),
-                c!("31;1", format_bytes(downloaded_bytes)),
-                c!("31;1", bytes_is_estimate),
-                c!("31;1", format_bytes(total_bytes)),
-                c!("36;1", format!("{:.02}%", percent))
-            );
-            stderr().flush()?;
-        }
-        "finished" => {
-            eprintln!("-> Finished downloading.");
-        }
-        "error" => {
-            // TODO: This should probably return an Err. But I'm not so sure where the error would
-            // bubble up to (i.e., who would catch it) <2025-01-21>
-            eprintln!("-> Error while downloading: {}", get_title());
-            process::exit(1);
-        }
-        other => unreachable!("'{other}' should not be a valid state!"),
-    };
+                {
+                    let add_filter = ytdl_logger.get_attr("addFilter", vm)?;
+                    add_filter.call(
+                        (vm.new_function("yt_dlp_error_filter", filter_error_log),),
+                        vm,
+                    )?;
+                }
 
-    Ok(())
-}
+                opts.set_item("logger", ytdl_logger, vm)?;
+            }
 
-pub fn add_hooks<'a>(opts: Bound<'a, PyDict>, py: Python<'_>) -> PyResult<Bound<'a, PyDict>> {
-    if let Some(hooks) = opts.get_item("progress_hooks")? {
-        let hooks = hooks.downcast::<PyList>()?;
-        hooks.append(wrap_pyfunction!(progress_hook, py)?)?;
+            let youtube_dl_class = class.call((opts,), vm)?;
 
-        opts.set_item("progress_hooks", hooks)?;
-    } else {
-        // No hooks are set yet
-        let hooks_list = PyList::new(py, &[wrap_pyfunction!(progress_hook, py)?])?;
+            Ok::<_, PyRef<PyBaseException>>((yt_dlp_module, youtube_dl_class))
+        }) {
+            Ok(ok) => ok,
+            Err(err) => {
+                interpreter.finalize(Some(err));
+                return Err(build::Error::Python);
+            }
+        };
 
-        opts.set_item("progress_hooks", hooks_list)?;
+        Ok(Self {
+            interpreter,
+            youtube_dl_class,
+            yt_dlp_module,
+            options: output_options,
+        })
     }
 
-    Ok(opts)
-}
-
-/// Take the result of the ie (may be modified) and resolve all unresolved
-/// references (URLs, playlist items).
-///
-/// It will also download the videos if 'download'.
-/// Returns the resolved `ie_result`.
-#[allow(clippy::missing_panics_doc)]
-pub fn process_ie_result(
-    yt_dlp_opts: &Map<String, Value>,
-    ie_result: InfoJson,
-    download: bool,
-) -> Result<InfoJson, YtDlpError> {
-    Python::with_gil(|py| -> Result<InfoJson, YtDlpError> {
-        let opts = json_map_to_py_dict(yt_dlp_opts, py)?;
-
-        let instance = get_yt_dlp(py, opts)?;
-
-        let args = {
-            let ie_result = json_loads_str(py, ie_result)?;
-            (ie_result,)
-        };
+    /// # Panics
+    ///
+    /// If `yt_dlp` changed their location or type of `__version__`.
+    pub fn version(&self) -> String {
+        let str_ref: PyRef<PyStr> = self.interpreter.enter_and_expect(
+            |vm| {
+                let version_module = self.yt_dlp_module.get_attr("version", vm)?;
+                let version = version_module.get_attr("__version__", vm)?;
+                let version = version.downcast().expect("This should always be a string");
+                Ok(version)
+            },
+            "yt_dlp version location has changed",
+        );
+        str_ref.to_string()
+    }
 
-        let kwargs = PyDict::new(py);
-        kwargs.set_item("download", download)?;
+    /// Download a given list of URLs.
+    /// Returns the paths they were downloaded to.
+    ///
+    /// # Errors
+    /// If one of the downloads error.
+    pub fn download(&self, urls: &[Url]) -> Result<Vec<PathBuf>, extract_info::Error> {
+        let mut out_paths = Vec::with_capacity(urls.len());
+
+        for url in urls {
+            info!("Started downloading url: '{url}'");
+            let info_json = self.extract_info(url, true, true)?;
+
+            // Try to work around yt-dlp type weirdness
+            let result_string = if let Some(filename) = info_json.get("filename") {
+                PathBuf::from(json_cast!(filename, as_str))
+            } else {
+                PathBuf::from(json_get!(
+                    json_cast!(
+                        json_get!(info_json, "requested_downloads", as_array)[0],
+                        as_object
+                    ),
+                    "filename",
+                    as_str
+                ))
+            };
 
-        let result = instance
-            .call_method("process_ie_result", args, Some(&kwargs))?
-            .downcast_into::<PyDict>()
-            .expect("This is a dict");
+            out_paths.push(result_string);
+            info!("Finished downloading url");
+        }
 
-        let result_str = json_dumps(py, result.into_any())?;
+        Ok(out_paths)
+    }
 
-        serde_json::from_str(&result_str).map_err(Into::into)
-    })
-}
+    /// `extract_info(self, url, download=True, ie_key=None, extra_info=None, process=True, force_generic_extractor=False)`
+    ///
+    /// Extract and return the information dictionary of the URL
+    ///
+    /// Arguments:
+    /// - `url`          URL to extract
+    ///
+    /// Keyword arguments:
+    /// :`download`     Whether to download videos
+    /// :`process`      Whether to resolve all unresolved references (URLs, playlist items).
+    ///                 Must be True for download to work
+    ///
+    /// # Panics
+    /// If expectations about python fail to hold.
+    ///
+    /// # Errors
+    /// If python operations fail.
+    pub fn extract_info(
+        &self,
+        url: &Url,
+        download: bool,
+        process: bool,
+    ) -> Result<InfoJson, extract_info::Error> {
+        match self.interpreter.enter(|vm| {
+            let pos_args = PosArgs::new(vec![vm.new_pyobj(url.to_string())]);
+
+            let kw_args = KwArgs::new({
+                let mut map = IndexMap::new();
+                map.insert("download".to_owned(), vm.new_pyobj(download));
+                map.insert("process".to_owned(), vm.new_pyobj(process));
+                map
+            });
+
+            let fun_args = FuncArgs::new(pos_args, kw_args);
+
+            let inner = self.youtube_dl_class.get_attr("extract_info", vm)?;
+            let result = inner
+                .call_with_args(fun_args, vm)?
+                .downcast::<PyDict>()
+                .expect("This is a dict");
+
+            // Resolve the generator object
+            if let Ok(generator) = result.get_item("entries", vm) {
+                if generator.payload_is::<PyList>() {
+                    // already resolved. Do nothing
+                } else {
+                    let max_backlog = self.options.get("playlistend").map_or(10000, |value| {
+                        usize::try_from(value.as_u64().expect("Works")).expect("Should work")
+                    });
+
+                    let mut out = vec![];
+                    let next = generator.get_attr("__next__", vm)?;
+                    while let Ok(output) = next.call((), vm) {
+                        out.push(output);
+
+                        if out.len() == max_backlog {
+                            break;
+                        }
+                    }
+                    result.set_item("entries", vm.new_pyobj(out), vm)?;
+                }
+            }
 
-/// `extract_info(self, url, download=True, ie_key=None, extra_info=None, process=True, force_generic_extractor=False)`
-///
-/// Extract and return the information dictionary of the URL
-///
-/// Arguments:
-/// @param url          URL to extract
-///
-/// Keyword arguments:
-/// @param download     Whether to download videos
-/// @param process      Whether to resolve all unresolved references (URLs, playlist items).
-///                     Must be True for download to work
-/// @param `ie_key`       Use only the extractor with this key
-///
-/// @param `extra_info`   Dictionary containing the extra values to add to the info (For internal use only)
-/// @`force_generic_extractor`  Force using the generic extractor (Deprecated; use `ie_key`='Generic')
-#[allow(clippy::missing_panics_doc)]
-pub fn extract_info(
-    yt_dlp_opts: &Map<String, Value>,
-    url: &Url,
-    download: bool,
-    process: bool,
-) -> Result<InfoJson, YtDlpError> {
-    Python::with_gil(|py| -> Result<InfoJson, YtDlpError> {
-        let opts = json_map_to_py_dict(yt_dlp_opts, py)?;
-
-        let instance = get_yt_dlp(py, opts)?;
-        let args = (url.as_str(),);
-
-        let kwargs = PyDict::new(py);
-        kwargs.set_item("download", download)?;
-        kwargs.set_item("process", process)?;
-
-        let result = instance
-            .call_method("extract_info", args, Some(&kwargs))?
-            .downcast_into::<PyDict>()
-            .expect("This is a dict");
-
-        // Resolve the generator object
-        if let Some(generator) = result.get_item("entries")? {
-            if generator.is_instance_of::<PyList>() {
-                // already resolved. Do nothing
-            } else {
-                let max_backlog = yt_dlp_opts.get("playlistend").map_or(10000, |value| {
-                    usize::try_from(value.as_u64().expect("Works")).expect("Should work")
-                });
+            let result = {
+                let sanitize = self.youtube_dl_class.get_attr("sanitize_info", vm)?;
+                let value = sanitize.call((result,), vm)?;
 
-                let mut out = vec![];
-                while let Ok(output) = generator.call_method0("__next__") {
-                    out.push(output);
+                value.downcast::<PyDict>().expect("This should stay a dict")
+            };
 
-                    if out.len() == max_backlog {
-                        break;
-                    }
+            let result_json = json_dumps(result, vm);
+
+            if let Ok(confirm) = env::var("YT_STORE_INFO_JSON") {
+                if confirm == "yes" {
+                    let mut file = File::create("output.info.json").unwrap();
+                    write!(
+                        file,
+                        "{}",
+                        serde_json::to_string_pretty(&serde_json::Value::Object(
+                            result_json.clone()
+                        ))
+                        .expect("Valid json")
+                    )
+                    .unwrap();
                 }
-                result.set_item("entries", out)?;
+            }
+
+            Ok::<_, PyRef<PyBaseException>>(result_json)
+        }) {
+            Ok(ok) => Ok(ok),
+            Err(err) => {
+                self.interpreter.enter(|vm| {
+                    vm.print_exception(err);
+                });
+                Err(extract_info::Error::Python)
             }
         }
+    }
+
+    /// Take the (potentially modified) result of the information extractor (i.e.,
+    /// [`Self::extract_info`] with `process` and `download` set to false)
+    /// and resolve all unresolved references (URLs,
+    /// playlist items).
+    ///
+    /// It will also download the videos if 'download' is true.
+    /// Returns the resolved `ie_result`.
+    ///
+    /// # Panics
+    /// If expectations about python fail to hold.
+    ///
+    /// # Errors
+    /// If python operations fail.
+    pub fn process_ie_result(
+        &self,
+        ie_result: InfoJson,
+        download: bool,
+    ) -> Result<InfoJson, process_ie_result::Error> {
+        match self.interpreter.enter(|vm| {
+            let pos_args = PosArgs::new(vec![vm.new_pyobj(json_loads(ie_result, vm))]);
+
+            let kw_args = KwArgs::new({
+                let mut map = IndexMap::new();
+                map.insert("download".to_owned(), vm.new_pyobj(download));
+                map
+            });
+
+            let fun_args = FuncArgs::new(pos_args, kw_args);
+
+            let inner = self.youtube_dl_class.get_attr("process_ie_result", vm)?;
+            let result = inner
+                .call_with_args(fun_args, vm)?
+                .downcast::<PyDict>()
+                .expect("This is a dict");
+
+            let result = {
+                let sanitize = self.youtube_dl_class.get_attr("sanitize_info", vm)?;
+                let value = sanitize.call((result,), vm)?;
+
+                value.downcast::<PyDict>().expect("This should stay a dict")
+            };
 
-        let result_str = json_dumps(py, result.into_any())?;
+            let result_json = json_dumps(result, vm);
 
-        if let Ok(confirm) = env::var("YT_STORE_INFO_JSON") {
-            if confirm == "yes" {
-                let mut file = File::create("output.info.json")?;
-                write!(file, "{result_str}").unwrap();
+            Ok::<_, PyRef<PyBaseException>>(result_json)
+        }) {
+            Ok(ok) => Ok(ok),
+            Err(err) => {
+                self.interpreter.enter(|vm| {
+                    vm.print_exception(err);
+                });
+                Err(process_ie_result::Error::Python)
             }
         }
-
-        serde_json::from_str(&result_str).map_err(Into::into)
-    })
+    }
 }
 
-/// # Panics
-/// Only if python fails to return a valid URL.
-pub fn unsmuggle_url(smug_url: &Url) -> PyResult<Url> {
-    Python::with_gil(|py| {
-        let utils = get_yt_dlp_utils(py)?;
-        let url = utils
-            .call_method1("unsmuggle_url", (smug_url.as_str(),))?
-            .downcast::<PyTuple>()?
-            .get_item(0)?;
-
-        let url: Url = url
-            .downcast::<PyString>()?
-            .to_string()
-            .parse()
-            .expect("Python should be able to return a valid url");
-
-        Ok(url)
-    })
+#[allow(missing_docs)]
+pub mod process_ie_result {
+    #[derive(Debug, thiserror::Error, Clone, Copy)]
+    pub enum Error {
+        #[error("Python threw an exception")]
+        Python,
+    }
 }
-
-/// Download a given list of URLs.
-/// Returns the paths they were downloaded to.
-///
-/// # Panics
-/// Only if `yt_dlp` changes their `info_json` schema.
-pub fn download(
-    urls: &[Url],
-    download_options: &Map<String, Value>,
-) -> Result<Vec<PathBuf>, YtDlpError> {
-    let mut out_paths = Vec::with_capacity(urls.len());
-
-    for url in urls {
-        info!("Started downloading url: '{}'", url);
-        let info_json = extract_info(download_options, url, true, true)?;
-
-        // Try to work around yt-dlp type weirdness
-        let result_string = if let Some(filename) = info_json.filename {
-            filename
-        } else {
-            info_json.requested_downloads.expect("This must exist")[0]
-                .filename
-                .clone()
-        };
-
-        out_paths.push(result_string);
-        info!("Finished downloading url: '{}'", url);
+#[allow(missing_docs)]
+pub mod extract_info {
+    #[derive(Debug, thiserror::Error, Clone, Copy)]
+    pub enum Error {
+        #[error("Python threw an exception")]
+        Python,
     }
-
-    Ok(out_paths)
 }
 
-fn json_map_to_py_dict<'a>(
-    map: &Map<String, Value>,
-    py: Python<'a>,
-) -> PyResult<Bound<'a, PyDict>> {
-    let json_string = serde_json::to_string(&map).expect("This must always work");
+pub type InfoJson = serde_json::Map<String, serde_json::Value>;
+pub type ProgressHookFunction = fn(input: FuncArgs, vm: &VirtualMachine);
 
-    let python_dict = json_loads(py, json_string)?;
-
-    Ok(python_dict)
+/// Options, that are used to customize the download behaviour.
+///
+/// In the future, this might get a Builder api.
+///
+/// See `help(yt_dlp.YoutubeDL())` from python for a full list of available options.
+#[derive(Default, Debug)]
+pub struct YoutubeDLOptions {
+    options: serde_json::Map<String, serde_json::Value>,
+    progress_hook: Option<ProgressHookFunction>,
 }
 
-fn json_dumps(py: Python<'_>, input: Bound<'_, PyAny>) -> PyResult<String> {
-    //     json.dumps(yt_dlp.sanitize_info(input))
+impl YoutubeDLOptions {
+    #[must_use]
+    pub fn new() -> Self {
+        Self {
+            options: serde_json::Map::new(),
+            progress_hook: None,
+        }
+    }
 
-    let yt_dlp = get_yt_dlp(py, PyDict::new(py))?;
-    let sanitized_result = yt_dlp.call_method1("sanitize_info", (input,))?;
+    #[must_use]
+    pub fn set(self, key: impl Into<String>, value: impl Into<serde_json::Value>) -> Self {
+        let mut options = self.options;
+        options.insert(key.into(), value.into());
 
-    let json = PyModule::import(py, "json")?;
-    let dumps = json.getattr("dumps")?;
+        Self {
+            options,
+            progress_hook: self.progress_hook,
+        }
+    }
 
-    let output = dumps.call1((sanitized_result,))?;
+    #[must_use]
+    pub fn with_progress_hook(self, progress_hook: ProgressHookFunction) -> Self {
+        if let Some(_previous_hook) = self.progress_hook {
+            todo!()
+        } else {
+            Self {
+                options: self.options,
+                progress_hook: Some(progress_hook),
+            }
+        }
+    }
 
-    let output_str = output.extract::<String>()?;
+    /// # Errors
+    /// If the underlying [`YoutubeDL::from_options`] errors.
+    pub fn build(self) -> Result<YoutubeDL, build::Error> {
+        YoutubeDL::from_options(self)
+    }
 
-    Ok(output_str)
-}
+    #[must_use]
+    pub fn from_json_options(options: serde_json::Map<String, serde_json::Value>) -> Self {
+        Self {
+            options,
+            progress_hook: None,
+        }
+    }
 
-fn json_loads_str<T: Serialize>(py: Python<'_>, input: T) -> PyResult<Bound<'_, PyDict>> {
-    let string = serde_json::to_string(&input).expect("Correct json must be pased");
+    #[must_use]
+    pub fn get(&self, key: &str) -> Option<&serde_json::Value> {
+        self.options.get(key)
+    }
 
-    json_loads(py, string)
+    fn into_py_dict(self, vm: &VirtualMachine) -> PyRef<PyDict> {
+        json_loads(self.options, vm)
+    }
 }
 
-fn json_loads(py: Python<'_>, input: String) -> PyResult<Bound<'_, PyDict>> {
-    //     json.loads(input)
-
-    let json = PyModule::import(py, "json")?;
-    let dumps = json.getattr("loads")?;
+#[allow(missing_docs)]
+pub mod build {
+    #[derive(Debug, thiserror::Error)]
+    pub enum Error {
+        #[error("Python threw an exception")]
+        Python,
 
-    let output = dumps.call1((input,))?;
-
-    Ok(output
-        .downcast::<PyDict>()
-        .expect("This should always be a PyDict")
-        .clone())
+        #[error("Io error: {0}")]
+        Io(#[from] std::io::Error),
+    }
 }
 
-fn get_yt_dlp_utils(py: Python<'_>) -> PyResult<Bound<'_, PyAny>> {
-    let yt_dlp = PyModule::import(py, "yt_dlp")?;
-    let utils = yt_dlp.getattr("utils")?;
-
-    Ok(utils)
+fn json_loads(
+    input: serde_json::Map<String, serde_json::Value>,
+    vm: &VirtualMachine,
+) -> PyRef<PyDict> {
+    let json = vm.import("json", 0).expect("Module exists");
+    let loads = json.get_attr("loads", vm).expect("Method exists");
+    let self_str = serde_json::to_string(&serde_json::Value::Object(input)).expect("Valid json");
+    let dict = loads
+        .call((self_str,), vm)
+        .expect("Valid json is always a valid dict");
+
+    dict.downcast().expect("Should always be a dict")
 }
-fn get_yt_dlp<'a>(py: Python<'a>, opts: Bound<'a, PyDict>) -> PyResult<Bound<'a, PyAny>> {
-    // Unconditionally set a logger
-    let opts = add_logger_and_sig_handler(opts, py)?;
-    let opts = add_hooks(opts, py)?;
 
-    let yt_dlp = PyModule::import(py, "yt_dlp")?;
-    let youtube_dl = yt_dlp.call_method1("YoutubeDL", (opts,))?;
-
-    Ok(youtube_dl)
+/// # Panics
+/// If expectation about python operations fail.
+pub fn json_dumps(
+    input: PyRef<PyDict>,
+    vm: &VirtualMachine,
+) -> serde_json::Map<String, serde_json::Value> {
+    let json = vm.import("json", 0).expect("Module exists");
+    let dumps = json.get_attr("dumps", vm).expect("Method exists");
+    let dict = dumps
+        .call((input,), vm)
+        .map_err(|err| vm.print_exception(err))
+        .expect("Might not always work, but for our dicts it works");
+
+    let string: PyRef<PyStr> = dict.downcast().expect("Should always be a string");
+
+    let real_string = string.to_str().expect("Should be valid utf8");
+
+    // {
+    //     let mut file = File::create("debug.dump.json").unwrap();
+    //     write!(file, "{}", real_string).unwrap();
+    // }
+
+    let value: serde_json::Value = serde_json::from_str(real_string).expect("Should be valid json");
+
+    match value {
+        serde_json::Value::Object(map) => map,
+        _ => unreachable!("These should not be json.dumps output"),
+    }
 }
diff --git a/crates/yt_dlp/src/logging.rs b/crates/yt_dlp/src/logging.rs
index e731502..5cb4c1d 100644
--- a/crates/yt_dlp/src/logging.rs
+++ b/crates/yt_dlp/src/logging.rs
@@ -10,34 +10,66 @@
 
 // This file is taken from: https://github.com/dylanbstorey/pyo3-pylogger/blob/d89e0d6820ebc4f067647e3b74af59dbc4941dd5/src/lib.rs
 // It is licensed under the Apache 2.0 License, copyright up to 2024 by Dylan Storey
-// It was modified by Benedikt Peetz 2024
-
-// The pyo3 `pyfunction` proc-macros call unsafe functions internally, which trigger this lint.
-#![allow(unsafe_op_in_unsafe_fn)]
-
-use std::ffi::CString;
+// It was modified by Benedikt Peetz 2024, 2025
 
 use log::{Level, MetadataBuilder, Record, logger};
-use pyo3::{
-    Bound, PyAny, PyResult, Python,
-    prelude::{PyAnyMethods, PyListMethods, PyModuleMethods},
-    pyfunction, wrap_pyfunction,
+use rustpython::vm::{
+    PyObjectRef, PyRef, PyResult, VirtualMachine,
+    builtins::{PyInt, PyList, PyStr},
+    convert::ToPyObject,
+    function::FuncArgs,
 };
 
 /// Consume a Python `logging.LogRecord` and emit a Rust `Log` instead.
-#[allow(clippy::needless_pass_by_value)]
-#[pyfunction]
-fn host_log(record: Bound<'_, PyAny>, rust_target: &str) -> PyResult<()> {
-    let level = record.getattr("levelno")?;
-    let message = record.getattr("getMessage")?.call0()?.to_string();
-    let pathname = record.getattr("pathname")?.to_string();
-    let lineno = record
-        .getattr("lineno")?
-        .to_string()
-        .parse::<u32>()
-        .expect("This should always be a u32");
-
-    let logger_name = record.getattr("name")?.to_string();
+fn host_log(mut input: FuncArgs, vm: &VirtualMachine) -> PyResult<()> {
+    let record = input.args.remove(0);
+    let rust_target = {
+        let base: PyRef<PyStr> = input.args.remove(0).downcast().expect("Should be a string");
+        base.as_str().to_owned()
+    };
+
+    let level = {
+        let level: PyRef<PyInt> = record
+            .get_attr("levelno", vm)?
+            .downcast()
+            .expect("Should always be an int");
+        level.as_u32_mask()
+    };
+    let message = {
+        let get_message = record.get_attr("getMessage", vm)?;
+        let message: PyRef<PyStr> = get_message
+            .call((), vm)?
+            .downcast()
+            .expect("Downcasting works");
+
+        message.as_str().to_owned()
+    };
+
+    let pathname = {
+        let pathname: PyRef<PyStr> = record
+            .get_attr("pathname", vm)?
+            .downcast()
+            .expect("Is a string");
+
+        pathname.as_str().to_owned()
+    };
+
+    let lineno = {
+        let lineno: PyRef<PyInt> = record
+            .get_attr("lineno", vm)?
+            .downcast()
+            .expect("Is a number");
+
+        lineno.as_u32_mask()
+    };
+
+    let logger_name = {
+        let name: PyRef<PyStr> = record
+            .get_attr("name", vm)?
+            .downcast()
+            .expect("Should be a string");
+        name.as_str().to_owned()
+    };
 
     let full_target: Option<String> = if logger_name.trim().is_empty() || logger_name == "root" {
         None
@@ -48,25 +80,25 @@ fn host_log(record: Bound<'_, PyAny>, rust_target: &str) -> PyResult<()> {
         Some(format!("{rust_target}::{logger_name}"))
     };
 
-    let target = full_target.as_deref().unwrap_or(rust_target);
+    let target = full_target.as_deref().unwrap_or(&rust_target);
 
     // error
-    let error_metadata = if level.ge(40u8)? {
+    let error_metadata = if level >= 40 {
         MetadataBuilder::new()
             .target(target)
             .level(Level::Error)
             .build()
-    } else if level.ge(30u8)? {
+    } else if level >= 30 {
         MetadataBuilder::new()
             .target(target)
             .level(Level::Warn)
             .build()
-    } else if level.ge(20u8)? {
+    } else if level >= 20 {
         MetadataBuilder::new()
             .target(target)
             .level(Level::Info)
             .build()
-    } else if level.ge(10u8)? {
+    } else if level >= 10 {
         MetadataBuilder::new()
             .target(target)
             .level(Level::Debug)
@@ -98,13 +130,24 @@ fn host_log(record: Bound<'_, PyAny>, rust_target: &str) -> PyResult<()> {
 /// # Panics
 /// Only if internal assertions fail.
 #[allow(clippy::module_name_repetitions)]
-pub fn setup_logging(py: Python<'_>, target: &str) -> PyResult<()> {
-    let logging = py.import("logging")?;
+pub(super) fn setup_logging(vm: &VirtualMachine, target: &str) -> PyResult<PyObjectRef> {
+    let logging = vm.import("logging", 0)?;
 
-    logging.setattr("host_log", wrap_pyfunction!(host_log, &logging)?)?;
+    let scope = vm.new_scope_with_builtins();
 
-    py.run(
-        CString::new(format!(
+    for (key, value) in logging.dict().expect("Should be a dict") {
+        let key: PyRef<PyStr> = key.downcast().expect("Is a string");
+
+        scope.globals.set_item(key.as_str(), value, vm)?;
+    }
+    scope
+        .globals
+        .set_item("host_log", vm.new_function("host_log", host_log).into(), vm)?;
+
+    let local_scope = scope.clone();
+    vm.run_code_string(
+        local_scope,
+        format!(
             r#"
 class HostHandler(Handler):
     def __init__(self, level=0):
@@ -119,15 +162,36 @@ def basicConfig(*pargs, **kwargs):
         kwargs["handlers"] = [HostHandler()]
     return oldBasicConfig(*pargs, **kwargs)
 "#
-        ))
-        .expect("This is hardcoded")
-        .as_c_str(),
-        Some(&logging.dict()),
-        None,
+        )
+        .as_str(),
+        "<embedded logging initializing code>".to_owned(),
     )?;
 
-    let all = logging.index()?;
-    all.append("HostHandler")?;
-
-    Ok(())
+    let all: PyRef<PyList> = logging
+        .get_attr("__all__", vm)?
+        .downcast()
+        .expect("Is a list");
+    all.borrow_vec_mut().push(vm.new_pyobj("HostHandler"));
+
+    // {
+    //     let logging_dict = logging.dict().expect("Exists");
+    //
+    //     for (key, val) in scope.globals {
+    //         let key: PyRef<PyStr> = key.downcast().expect("Is a string");
+    //
+    //         if !logging_dict.contains_key(key.as_str(), vm) {
+    //             logging_dict.set_item(key.as_str(), val, vm)?;
+    //         }
+    //     }
+    //
+    //     for (key, val) in scope.locals {
+    //         let key: PyRef<PyStr> = key.downcast().expect("Is a string");
+    //
+    //         if !logging_dict.contains_key(key.as_str(), vm) {
+    //             logging_dict.set_item(key.as_str(), val, vm)?;
+    //         }
+    //     }
+    // }
+
+    Ok(scope.globals.to_pyobject(vm))
 }
diff --git a/crates/yt_dlp/src/progress_hook.rs b/crates/yt_dlp/src/progress_hook.rs
new file mode 100644
index 0000000..7a7628a
--- /dev/null
+++ b/crates/yt_dlp/src/progress_hook.rs
@@ -0,0 +1,41 @@
+#[macro_export]
+macro_rules! mk_python_function {
+    ($name:ident, $new_name:ident) => {
+        pub fn $new_name(
+            mut args: $crate::progress_hook::rustpython::vm::function::FuncArgs,
+            vm: &$crate::progress_hook::rustpython::vm::VirtualMachine,
+        ) {
+            use $crate::progress_hook::rustpython;
+
+            let input = {
+                let dict: rustpython::vm::PyRef<rustpython::vm::builtins::PyDict> = args
+                    .args
+                    .remove(0)
+                    .downcast()
+                    .expect("The progress hook is always called with these args");
+                let new_dict = rustpython::vm::builtins::PyDict::new_ref(&vm.ctx);
+                dict.into_iter()
+                    .filter_map(|(name, value)| {
+                        let real_name: rustpython::vm::PyRefExact<rustpython::vm::builtins::PyStr> =
+                            name.downcast_exact(vm).expect("Is a string");
+                        let name_str = real_name.to_str().expect("Is a string");
+                        if name_str.starts_with('_') {
+                            None
+                        } else {
+                            Some((name_str.to_owned(), value))
+                        }
+                    })
+                    .for_each(|(key, value)| {
+                        new_dict
+                            .set_item(&key, value, vm)
+                            .expect("This is a transposition, should always be valid");
+                    });
+
+                $crate::json_dumps(new_dict, vm)
+            };
+            $name(input).expect("Shall not fail!");
+        }
+    };
+}
+
+pub use rustpython;
diff --git a/crates/yt_dlp/src/python_json_decode_failed.error_msg b/crates/yt_dlp/src/python_json_decode_failed.error_msg
deleted file mode 100644
index d10688e..0000000
--- a/crates/yt_dlp/src/python_json_decode_failed.error_msg
+++ /dev/null
@@ -1,5 +0,0 @@
-Failed to decode yt-dlp's response: {}
-
-This is probably a bug.
-Try running the command again with the `YT_STORE_INFO_JSON=yes` environment variable set
-and maybe debug it further via `yt check info-json output.info.json`.
diff --git a/crates/yt_dlp/src/python_json_decode_failed.error_msg.license b/crates/yt_dlp/src/python_json_decode_failed.error_msg.license
deleted file mode 100644
index 7813eb6..0000000
--- a/crates/yt_dlp/src/python_json_decode_failed.error_msg.license
+++ /dev/null
@@ -1,9 +0,0 @@
-yt - A fully featured command line YouTube client
-
-Copyright (C) 2025 Benedikt Peetz <benedikt.peetz@b-peetz.de>
-SPDX-License-Identifier: GPL-3.0-or-later
-
-This file is part of Yt.
-
-You should have received a copy of the License along with this program.
-If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
diff --git a/crates/yt_dlp/src/tests.rs b/crates/yt_dlp/src/tests.rs
deleted file mode 100644
index 91b6626..0000000
--- a/crates/yt_dlp/src/tests.rs
+++ /dev/null
@@ -1,89 +0,0 @@
-// yt - A fully featured command line YouTube client
-//
-// Copyright (C) 2024 Benedikt Peetz <benedikt.peetz@b-peetz.de>
-// SPDX-License-Identifier: GPL-3.0-or-later
-//
-// This file is part of Yt.
-//
-// You should have received a copy of the License along with this program.
-// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
-
-use std::sync::LazyLock;
-
-use serde_json::{Value, json};
-use url::Url;
-
-static YT_OPTS: LazyLock<serde_json::Map<String, Value>> = LazyLock::new(|| {
-    match json!({
-        "playliststart": 1,
-        "playlistend": 10,
-        "noplaylist": false,
-        "extract_flat": false,
-    }) {
-        Value::Object(obj) => obj,
-        _ => unreachable!("This json is hardcoded"),
-    }
-});
-
-#[tokio::test]
-#[ignore = "This test hangs forever"]
-async fn test_extract_info_video() {
-    let info = crate::extract_info(
-        &YT_OPTS,
-        &Url::parse("https://www.youtube.com/watch?v=dbjPnXaacAU").expect("Is valid."),
-        false,
-        false,
-    )
-    .await
-    .map_err(|err| format!("Encountered error: '{err}'"))
-    .unwrap();
-
-    println!("{info:#?}");
-}
-
-#[tokio::test]
-#[ignore = "This test hangs forever"]
-async fn test_extract_info_url() {
-    let err = crate::extract_info(
-        &YT_OPTS,
-        &Url::parse("https://google.com").expect("Is valid."),
-        false,
-        false,
-    )
-    .await
-    .map_err(|err| format!("Encountered error: '{err}'"))
-    .unwrap();
-
-    println!("{err:#?}");
-}
-
-#[tokio::test]
-#[ignore = "This test hangs forever"]
-async fn test_extract_info_playlist() {
-    let err = crate::extract_info(
-        &YT_OPTS,
-        &Url::parse("https://www.youtube.com/@TheGarriFrischer/videos").expect("Is valid."),
-        false,
-        true,
-    )
-    .await
-    .map_err(|err| format!("Encountered error: '{err}'"))
-    .unwrap();
-
-    println!("{err:#?}");
-}
-#[tokio::test]
-#[ignore = "This test hangs forever"]
-async fn test_extract_info_playlist_full() {
-    let err = crate::extract_info(
-        &YT_OPTS,
-        &Url::parse("https://www.youtube.com/@NixOS-Foundation/videos").expect("Is valid."),
-        false,
-        true,
-    )
-    .await
-    .map_err(|err| format!("Encountered error: '{err}'"))
-    .unwrap();
-
-    println!("{err:#?}");
-}
diff --git a/crates/yt_dlp/src/wrapper/info_json.rs b/crates/yt_dlp/src/wrapper/info_json.rs
deleted file mode 100644
index ea73d26..0000000
--- a/crates/yt_dlp/src/wrapper/info_json.rs
+++ /dev/null
@@ -1,827 +0,0 @@
-// yt - A fully featured command line YouTube client
-//
-// Copyright (C) 2024 Benedikt Peetz <benedikt.peetz@b-peetz.de>
-// SPDX-License-Identifier: GPL-3.0-or-later
-//
-// This file is part of Yt.
-//
-// You should have received a copy of the License along with this program.
-// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
-
-// `yt_dlp` named them like this.
-#![allow(clippy::pub_underscore_fields)]
-
-use std::{collections::HashMap, path::PathBuf};
-
-use pyo3::{Bound, PyResult, Python, types::PyDict};
-use serde::{Deserialize, Deserializer, Serialize};
-use serde_json::Value;
-use url::Url;
-
-use crate::json_loads_str;
-
-type Todo = String;
-type Extractor = String;
-type ExtractorKey = String;
-
-// TODO: Change this to map `_type` to a structure of values, instead of the options <2024-05-27>
-// And replace all the strings with better types (enums or urls)
-#[derive(Debug, Deserialize, Serialize, PartialEq)]
-#[serde(deny_unknown_fields)]
-pub struct InfoJson {
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub __files_to_move: Option<FilesToMove>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub __last_playlist_index: Option<u32>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub __post_extractor: Option<String>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub __x_forwarded_for_ip: Option<String>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub _filename: Option<PathBuf>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub _format_sort_fields: Option<Vec<String>>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub _has_drm: Option<Todo>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub _type: Option<InfoType>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub _version: Option<Version>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub abr: Option<f64>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub acodec: Option<String>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub age_limit: Option<u32>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub artists: Option<Todo>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub aspect_ratio: Option<f64>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub asr: Option<u32>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub audio_channels: Option<u32>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub audio_ext: Option<String>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub automatic_captions: Option<HashMap<String, Vec<Caption>>>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub availability: Option<String>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub average_rating: Option<String>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub categories: Option<Vec<String>>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub channel: Option<String>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub channel_follower_count: Option<u32>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub channel_id: Option<String>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub channel_is_verified: Option<bool>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub channel_url: Option<String>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub chapters: Option<Vec<Chapter>>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub comment_count: Option<u32>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub comments: Option<Vec<Comment>>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub concurrent_view_count: Option<u32>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub container: Option<Todo>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub description: Option<String>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub direct: Option<bool>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub display_id: Option<String>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub downloader_options: Option<DownloaderOptions>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub duration: Option<f64>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub duration_string: Option<String>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub dynamic_range: Option<String>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub entries: Option<Vec<InfoJson>>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub episode: Option<String>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub episode_number: Option<u32>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub epoch: Option<u32>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub ext: Option<String>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub extractor: Option<Extractor>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub extractor_key: Option<ExtractorKey>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub filename: Option<PathBuf>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub filesize: Option<u64>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub filesize_approx: Option<u64>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub format: Option<String>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub format_id: Option<String>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub format_index: Option<u32>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub format_note: Option<String>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub formats: Option<Vec<Format>>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub fps: Option<f64>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub fulltitle: Option<String>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub genre: Option<String>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub genres: Option<Vec<String>>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub has_drm: Option<bool>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub heatmap: Option<Vec<HeatMapEntry>>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub height: Option<u32>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub hls_aes: Option<Todo>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub http_headers: Option<HttpHeader>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub id: Option<String>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub ie_key: Option<ExtractorKey>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub is_live: Option<bool>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub language: Option<String>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub language_preference: Option<i32>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub license: Option<Todo>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub like_count: Option<u32>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub live_status: Option<String>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub location: Option<Todo>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub manifest_url: Option<Url>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub media_type: Option<Todo>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub modified_date: Option<String>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub n_entries: Option<u32>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub original_url: Option<String>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub playable_in_embed: Option<bool>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub playlist: Option<Todo>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub playlist_autonumber: Option<u32>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub playlist_channel: Option<Todo>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub playlist_channel_id: Option<Todo>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub playlist_count: Option<u32>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub playlist_id: Option<Todo>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub playlist_index: Option<u64>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub playlist_title: Option<Todo>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub playlist_uploader: Option<Todo>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub playlist_uploader_id: Option<Todo>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub playlist_webpage_url: Option<Url>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub preference: Option<Todo>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub protocol: Option<String>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub quality: Option<f64>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub release_date: Option<String>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub release_timestamp: Option<u64>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub release_year: Option<u32>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub repost_count: Option<u32>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub requested_downloads: Option<Vec<RequestedDownloads>>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub requested_entries: Option<Vec<u32>>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub requested_formats: Option<Vec<Format>>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub requested_subtitles: Option<HashMap<String, Subtitle>>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub resolution: Option<String>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub season: Option<String>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub season_number: Option<u32>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub series: Option<String>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub source_preference: Option<i32>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub sponsorblock_chapters: Option<Vec<SponsorblockChapter>>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub start_time: Option<f64>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub stretched_ratio: Option<Todo>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub subtitles: Option<HashMap<String, Vec<Caption>>>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub tags: Option<Vec<String>>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub tbr: Option<f64>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub thumbnail: Option<Url>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub thumbnails: Option<Vec<ThumbNail>>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub timestamp: Option<f64>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub title: Option<String>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub upload_date: Option<String>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub uploader: Option<String>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub uploader_id: Option<String>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub uploader_url: Option<String>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub url: Option<Url>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub vbr: Option<f64>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub vcodec: Option<String>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub video_ext: Option<String>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub view_count: Option<u32>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub was_live: Option<bool>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub webpage_url: Option<Url>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub webpage_url_basename: Option<String>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub webpage_url_domain: Option<String>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub width: Option<u32>,
-}
-
-#[derive(Debug, Deserialize, Serialize, PartialEq)]
-#[serde(deny_unknown_fields)]
-#[allow(missing_copy_implementations)]
-pub struct FilesToMove {}
-
-#[derive(Debug, Deserialize, Serialize, PartialEq)]
-#[serde(deny_unknown_fields)]
-pub struct RequestedDownloads {
-    pub __files_to_merge: Option<Vec<Todo>>,
-    pub __finaldir: PathBuf,
-    pub __infojson_filename: PathBuf,
-    pub __postprocessors: Vec<Todo>,
-    pub __real_download: bool,
-    pub __write_download_archive: bool,
-    pub _filename: PathBuf,
-    pub _type: InfoType,
-    pub _version: Version,
-    pub abr: f64,
-    pub acodec: String,
-    pub aspect_ratio: Option<f64>,
-    pub asr: Option<u32>,
-    pub audio_channels: Option<u32>,
-    pub audio_ext: Option<String>,
-    pub chapters: Option<Vec<SponsorblockChapter>>,
-    pub duration: Option<f64>,
-    pub dynamic_range: Option<String>,
-    pub ext: String,
-    pub filename: PathBuf,
-    pub filepath: PathBuf,
-    pub filesize_approx: Option<u64>,
-    pub format: String,
-    pub format_id: String,
-    pub format_note: Option<String>,
-    pub fps: Option<f64>,
-    pub has_drm: Option<bool>,
-    pub height: Option<u32>,
-    pub http_headers: Option<HttpHeader>,
-    pub infojson_filename: PathBuf,
-    pub language: Option<String>,
-    pub manifest_url: Option<Url>,
-    pub protocol: String,
-    pub quality: Option<i64>,
-    pub requested_formats: Option<Vec<Format>>,
-    pub resolution: String,
-    pub tbr: f64,
-    pub url: Option<Url>,
-    pub vbr: f64,
-    pub vcodec: String,
-    pub video_ext: Option<String>,
-    pub width: Option<u32>,
-}
-
-#[derive(Debug, Deserialize, Serialize, PartialEq, PartialOrd)]
-#[serde(deny_unknown_fields)]
-pub struct Subtitle {
-    pub ext: SubtitleExt,
-    pub filepath: PathBuf,
-    pub filesize: Option<u64>,
-    pub fragment_base_url: Option<Url>,
-    pub fragments: Option<Vec<Fragment>>,
-    pub manifest_url: Option<Url>,
-    pub name: Option<String>,
-    pub protocol: Option<Todo>,
-    pub url: Url,
-}
-
-#[derive(Debug, Deserialize, Serialize, PartialEq, PartialOrd, Ord, Eq, Clone, Copy)]
-pub enum SubtitleExt {
-    #[serde(alias = "vtt")]
-    Vtt,
-
-    #[serde(alias = "mp4")]
-    Mp4,
-
-    #[serde(alias = "json")]
-    Json,
-    #[serde(alias = "json3")]
-    Json3,
-
-    #[serde(alias = "ttml")]
-    Ttml,
-
-    #[serde(alias = "srv1")]
-    Srv1,
-    #[serde(alias = "srv2")]
-    Srv2,
-    #[serde(alias = "srv3")]
-    Srv3,
-}
-
-#[derive(Debug, Deserialize, Serialize, PartialEq, PartialOrd)]
-#[serde(deny_unknown_fields)]
-pub struct Caption {
-    pub ext: SubtitleExt,
-    pub filepath: Option<PathBuf>,
-    pub filesize: Option<u64>,
-    pub fragments: Option<Vec<SubtitleFragment>>,
-    pub fragment_base_url: Option<Url>,
-    pub manifest_url: Option<Url>,
-    pub name: Option<String>,
-    pub protocol: Option<String>,
-    pub url: String,
-    pub video_id: Option<String>,
-}
-
-#[derive(Debug, Deserialize, Serialize, PartialEq, PartialOrd)]
-#[serde(deny_unknown_fields)]
-pub struct SubtitleFragment {
-    path: PathBuf,
-    duration: Option<f64>,
-}
-
-#[derive(Debug, Deserialize, Serialize, PartialEq, PartialOrd)]
-#[serde(deny_unknown_fields)]
-pub struct Chapter {
-    pub end_time: f64,
-    pub start_time: f64,
-    pub title: String,
-}
-
-#[derive(Debug, Deserialize, Serialize, PartialEq)]
-#[serde(deny_unknown_fields)]
-pub struct SponsorblockChapter {
-    /// This is an utterly useless field, and should thus be ignored
-    pub _categories: Option<Vec<Vec<Value>>>,
-
-    pub categories: Option<Vec<SponsorblockChapterCategory>>,
-    pub category: Option<SponsorblockChapterCategory>,
-    pub category_names: Option<Vec<String>>,
-    pub end_time: f64,
-    pub name: Option<String>,
-    pub r#type: Option<SponsorblockChapterType>,
-    pub start_time: f64,
-    pub title: String,
-}
-
-pub fn get_none<'de, D, T>(_: D) -> Result<Option<T>, D::Error>
-where
-    D: Deserializer<'de>,
-{
-    Ok(None)
-}
-
-#[derive(Debug, Deserialize, Serialize, PartialEq, PartialOrd, Ord, Eq, Clone, Copy)]
-#[serde(deny_unknown_fields)]
-pub enum SponsorblockChapterType {
-    #[serde(alias = "skip")]
-    Skip,
-
-    #[serde(alias = "chapter")]
-    Chapter,
-
-    #[serde(alias = "poi")]
-    Poi,
-}
-#[derive(Debug, Deserialize, Serialize, PartialEq, PartialOrd, Ord, Eq, Clone, Copy)]
-#[serde(deny_unknown_fields)]
-pub enum SponsorblockChapterCategory {
-    #[serde(alias = "filler")]
-    Filler,
-
-    #[serde(alias = "interaction")]
-    Interaction,
-
-    #[serde(alias = "music_offtopic")]
-    MusicOfftopic,
-
-    #[serde(alias = "poi_highlight")]
-    PoiHighlight,
-
-    #[serde(alias = "preview")]
-    Preview,
-
-    #[serde(alias = "sponsor")]
-    Sponsor,
-
-    #[serde(alias = "selfpromo")]
-    SelfPromo,
-
-    #[serde(alias = "chapter")]
-    Chapter,
-
-    #[serde(alias = "intro")]
-    Intro,
-
-    #[serde(alias = "outro")]
-    Outro,
-}
-
-#[derive(Debug, Deserialize, Serialize, PartialEq, PartialOrd)]
-#[serde(deny_unknown_fields)]
-#[allow(missing_copy_implementations)]
-pub struct HeatMapEntry {
-    pub start_time: f64,
-    pub end_time: f64,
-    pub value: f64,
-}
-
-#[derive(Debug, Deserialize, Serialize, PartialEq, PartialOrd, Ord, Eq, Clone, Copy)]
-#[serde(deny_unknown_fields)]
-pub enum InfoType {
-    #[serde(alias = "playlist")]
-    #[serde(rename(serialize = "playlist"))]
-    Playlist,
-
-    #[serde(alias = "url")]
-    #[serde(rename(serialize = "url"))]
-    Url,
-
-    #[serde(alias = "video")]
-    #[serde(rename(serialize = "video"))]
-    Video,
-}
-
-#[derive(Debug, Deserialize, Serialize, Eq, PartialEq, PartialOrd, Ord)]
-#[serde(deny_unknown_fields)]
-pub struct Version {
-    pub current_git_head: Option<String>,
-    pub release_git_head: String,
-    pub repository: String,
-    pub version: String,
-}
-
-#[derive(Debug, Deserialize, Serialize, Clone, Eq, PartialEq, PartialOrd, Ord)]
-#[serde(from = "String")]
-#[serde(deny_unknown_fields)]
-pub enum Parent {
-    Root,
-    Id(String),
-}
-
-impl Parent {
-    #[must_use]
-    pub fn id(&self) -> Option<&str> {
-        if let Self::Id(id) = self {
-            Some(id)
-        } else {
-            None
-        }
-    }
-}
-
-impl From<String> for Parent {
-    fn from(value: String) -> Self {
-        if value == "root" {
-            Self::Root
-        } else {
-            Self::Id(value)
-        }
-    }
-}
-
-#[derive(Debug, Deserialize, Serialize, Clone, Eq, PartialEq, PartialOrd, Ord)]
-#[serde(from = "String")]
-#[serde(deny_unknown_fields)]
-pub struct Id {
-    pub id: String,
-}
-impl From<String> for Id {
-    fn from(value: String) -> Self {
-        Self {
-            // Take the last element if the string is split with dots, otherwise take the full id
-            id: value.split('.').last().unwrap_or(&value).to_owned(),
-        }
-    }
-}
-
-#[derive(Debug, Deserialize, Serialize, Clone, Eq, PartialEq, PartialOrd, Ord)]
-#[serde(deny_unknown_fields)]
-#[allow(clippy::struct_excessive_bools)]
-pub struct Comment {
-    pub id: Id,
-    pub text: String,
-    #[serde(default = "zero")]
-    pub like_count: u32,
-    pub is_pinned: bool,
-    pub author_id: String,
-    #[serde(default = "unknown")]
-    pub author: String,
-    pub author_is_verified: bool,
-    pub author_thumbnail: Url,
-    pub parent: Parent,
-    #[serde(deserialize_with = "edited_from_time_text", alias = "_time_text")]
-    pub edited: bool,
-    // Can't also be deserialized, as it's already used in 'edited'
-    // _time_text: String,
-    pub timestamp: i64,
-    pub author_url: Option<Url>,
-    pub author_is_uploader: bool,
-    pub is_favorited: bool,
-}
-fn unknown() -> String {
-    "<Unknown>".to_string()
-}
-fn zero() -> u32 {
-    0
-}
-fn edited_from_time_text<'de, D>(d: D) -> Result<bool, D::Error>
-where
-    D: Deserializer<'de>,
-{
-    let s = String::deserialize(d)?;
-    if s.contains(" (edited)") {
-        Ok(true)
-    } else {
-        Ok(false)
-    }
-}
-
-#[derive(Debug, Deserialize, Serialize, PartialEq, Eq, PartialOrd, Ord)]
-#[serde(deny_unknown_fields)]
-pub struct ThumbNail {
-    pub id: Option<String>,
-    pub preference: Option<i32>,
-    /// in the form of "[`height`]x[`width`]"
-    pub resolution: Option<String>,
-    pub url: Url,
-    pub width: Option<u32>,
-    pub height: Option<u32>,
-}
-
-#[derive(Debug, Deserialize, Serialize, PartialEq, PartialOrd)]
-#[serde(deny_unknown_fields)]
-pub struct Format {
-    pub __needs_testing: Option<bool>,
-    pub __working: Option<bool>,
-    pub abr: Option<f64>,
-    pub acodec: Option<String>,
-    pub aspect_ratio: Option<f64>,
-    pub asr: Option<f64>,
-    pub audio_channels: Option<u32>,
-    pub audio_ext: Option<String>,
-    pub columns: Option<u32>,
-    pub container: Option<String>,
-    pub downloader_options: Option<DownloaderOptions>,
-    pub dynamic_range: Option<String>,
-    pub ext: String,
-    pub filepath: Option<PathBuf>,
-    pub filesize: Option<u64>,
-    pub filesize_approx: Option<u64>,
-    pub format: Option<String>,
-    pub format_id: String,
-    pub format_index: Option<String>,
-    pub format_note: Option<String>,
-    pub fps: Option<f64>,
-    pub fragment_base_url: Option<Todo>,
-    pub fragments: Option<Vec<Fragment>>,
-    pub has_drm: Option<bool>,
-    pub height: Option<u32>,
-    pub http_headers: Option<HttpHeader>,
-    pub is_dash_periods: Option<bool>,
-    pub is_live: Option<bool>,
-    pub language: Option<String>,
-    pub language_preference: Option<i32>,
-    pub manifest_stream_number: Option<u32>,
-    pub manifest_url: Option<Url>,
-    pub preference: Option<i32>,
-    pub protocol: Option<String>,
-    pub quality: Option<f64>,
-    pub resolution: Option<String>,
-    pub rows: Option<u32>,
-    pub source_preference: Option<i32>,
-    pub tbr: Option<f64>,
-    pub url: Url,
-    pub vbr: Option<f64>,
-    pub vcodec: String,
-    pub video_ext: Option<String>,
-    pub width: Option<u32>,
-}
-
-#[derive(Debug, Deserialize, Serialize, Eq, PartialEq, PartialOrd, Ord)]
-#[serde(deny_unknown_fields)]
-#[allow(missing_copy_implementations)]
-pub struct DownloaderOptions {
-    http_chunk_size: u64,
-}
-
-#[derive(Debug, Deserialize, Serialize, Eq, PartialEq, PartialOrd, Ord)]
-#[serde(deny_unknown_fields)]
-pub struct HttpHeader {
-    #[serde(alias = "User-Agent")]
-    pub user_agent: Option<String>,
-
-    #[serde(alias = "Accept")]
-    pub accept: Option<String>,
-
-    #[serde(alias = "X-Forwarded-For")]
-    pub x_forwarded_for: Option<String>,
-
-    #[serde(alias = "Accept-Language")]
-    pub accept_language: Option<String>,
-
-    #[serde(alias = "Sec-Fetch-Mode")]
-    pub sec_fetch_mode: Option<String>,
-}
-
-#[derive(Debug, Deserialize, Serialize, PartialEq, PartialOrd)]
-#[serde(deny_unknown_fields)]
-pub struct Fragment {
-    pub duration: Option<f64>,
-    pub fragment_count: Option<usize>,
-    pub path: Option<PathBuf>,
-    pub url: Option<Url>,
-}
-
-impl InfoJson {
-    pub fn to_py_dict(self, py: Python<'_>) -> PyResult<Bound<'_, PyDict>> {
-        let output: Bound<'_, PyDict> = json_loads_str(py, self)?;
-        Ok(output)
-    }
-}
diff --git a/crates/yt_dlp/src/wrapper/mod.rs b/crates/yt_dlp/src/wrapper/mod.rs
deleted file mode 100644
index 3fe3247..0000000
--- a/crates/yt_dlp/src/wrapper/mod.rs
+++ /dev/null
@@ -1,12 +0,0 @@
-// yt - A fully featured command line YouTube client
-//
-// Copyright (C) 2024 Benedikt Peetz <benedikt.peetz@b-peetz.de>
-// SPDX-License-Identifier: GPL-3.0-or-later
-//
-// This file is part of Yt.
-//
-// You should have received a copy of the License along with this program.
-// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
-
-pub mod info_json;
-// pub mod yt_dlp_options;
diff --git a/crates/yt_dlp/src/wrapper/yt_dlp_options.rs b/crates/yt_dlp/src/wrapper/yt_dlp_options.rs
deleted file mode 100644
index 25595b5..0000000
--- a/crates/yt_dlp/src/wrapper/yt_dlp_options.rs
+++ /dev/null
@@ -1,62 +0,0 @@
-// yt - A fully featured command line YouTube client
-//
-// Copyright (C) 2024 Benedikt Peetz <benedikt.peetz@b-peetz.de>
-// SPDX-License-Identifier: GPL-3.0-or-later
-//
-// This file is part of Yt.
-//
-// You should have received a copy of the License along with this program.
-// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
-
-use pyo3::{Bound, PyResult, Python, types::PyDict};
-use serde::Serialize;
-
-use crate::json_loads;
-
-#[derive(Serialize, Clone)]
-pub struct YtDlpOptions {
-    pub playliststart: u32,
-    pub playlistend: u32,
-    pub noplaylist: bool,
-    pub extract_flat: ExtractFlat,
-    // pub extractor_args: ExtractorArgs,
-    // pub format: String,
-    // pub fragment_retries: u32,
-    // #[serde(rename(serialize = "getcomments"))]
-    // pub get_comments: bool,
-    // #[serde(rename(serialize = "ignoreerrors"))]
-    // pub ignore_errors: bool,
-    // pub retries: u32,
-    // #[serde(rename(serialize = "writeinfojson"))]
-    // pub write_info_json: bool,
-    // pub postprocessors: Vec<serde_json::Map<String, serde_json::Value>>,
-}
-
-#[derive(Serialize, Copy, Clone)]
-pub enum ExtractFlat {
-    #[serde(rename(serialize = "in_playlist"))]
-    InPlaylist,
-
-    #[serde(rename(serialize = "discard_in_playlist"))]
-    DiscardInPlaylist,
-}
-
-#[derive(Serialize, Clone)]
-pub struct ExtractorArgs {
-    pub youtube: YoutubeExtractorArgs,
-}
-
-#[derive(Serialize, Clone)]
-pub struct YoutubeExtractorArgs {
-    comment_sort: Vec<String>,
-    max_comments: Vec<String>,
-}
-
-impl YtDlpOptions {
-    pub fn to_py_dict(self, py: Python) -> PyResult<Bound<PyDict>> {
-        let string = serde_json::to_string(&self).expect("This should always work");
-
-        let output: Bound<PyDict> = json_loads(py, string)?;
-        Ok(output)
-    }
-}
diff --git a/yt/src/cli.rs b/yt/src/cli.rs
index 037f45c..e7ee4c2 100644
--- a/yt/src/cli.rs
+++ b/yt/src/cli.rs
@@ -103,12 +103,6 @@ pub enum Command {
     /// Show, the configuration options in effect
     Config {},
 
-    /// Perform various tests
-    Check {
-        #[command(subcommand)]
-        command: CheckCommand,
-    },
-
     /// Display the comments of the currently playing video
     Comments {},
     /// Display the description of the currently playing video
@@ -355,12 +349,6 @@ impl Default for SelectCommand {
     }
 }
 
-#[derive(Subcommand, Clone, Debug)]
-pub enum CheckCommand {
-    /// Check if the given `*.info.json` file is deserializable.
-    InfoJson { path: PathBuf },
-}
-
 #[derive(Subcommand, Clone, Copy, Debug)]
 pub enum CacheCommand {
     /// Invalidate all cache entries
diff --git a/yt/src/comments/comment.rs b/yt/src/comments/comment.rs
index 6b8cf73..5bc939c 100644
--- a/yt/src/comments/comment.rs
+++ b/yt/src/comments/comment.rs
@@ -9,7 +9,94 @@
 // You should have received a copy of the License along with this program.
 // If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
 
-use yt_dlp::wrapper::info_json::Comment;
+use serde::{Deserialize, Deserializer, Serialize};
+use url::Url;
+
+#[derive(Debug, Deserialize, Serialize, Clone, Eq, PartialEq, PartialOrd, Ord)]
+#[serde(from = "String")]
+#[serde(deny_unknown_fields)]
+pub enum Parent {
+    Root,
+    Id(String),
+}
+
+impl Parent {
+    #[must_use]
+    pub fn id(&self) -> Option<&str> {
+        if let Self::Id(id) = self {
+            Some(id)
+        } else {
+            None
+        }
+    }
+}
+
+impl From<String> for Parent {
+    fn from(value: String) -> Self {
+        if value == "root" {
+            Self::Root
+        } else {
+            Self::Id(value)
+        }
+    }
+}
+
+#[derive(Debug, Deserialize, Serialize, Clone, Eq, PartialEq, PartialOrd, Ord)]
+#[serde(from = "String")]
+#[serde(deny_unknown_fields)]
+pub struct Id {
+    pub id: String,
+}
+impl From<String> for Id {
+    fn from(value: String) -> Self {
+        Self {
+            // Take the last element if the string is split with dots, otherwise take the full id
+            id: value.split('.').last().unwrap_or(&value).to_owned(),
+        }
+    }
+}
+
+#[derive(Debug, Deserialize, Serialize, Clone, Eq, PartialEq, PartialOrd, Ord)]
+#[allow(clippy::struct_excessive_bools)]
+pub struct Comment {
+    pub id: Id,
+    pub text: String,
+    #[serde(default = "zero")]
+    pub like_count: u32,
+    pub is_pinned: bool,
+    pub author_id: String,
+    #[serde(default = "unknown")]
+    pub author: String,
+    pub author_is_verified: bool,
+    pub author_thumbnail: Url,
+    pub parent: Parent,
+    #[serde(deserialize_with = "edited_from_time_text", alias = "_time_text")]
+    pub edited: bool,
+    // Can't also be deserialized, as it's already used in 'edited'
+    // _time_text: String,
+    pub timestamp: i64,
+    pub author_url: Option<Url>,
+    pub author_is_uploader: bool,
+    pub is_favorited: bool,
+}
+
+fn unknown() -> String {
+    "<Unknown>".to_string()
+}
+fn zero() -> u32 {
+    0
+}
+fn edited_from_time_text<'de, D>(d: D) -> Result<bool, D::Error>
+where
+    D: Deserializer<'de>,
+{
+    let s = String::deserialize(d)?;
+    if s.contains(" (edited)") {
+        Ok(true)
+    } else {
+        Ok(false)
+    }
+}
 
 #[derive(Debug, Clone)]
 #[allow(clippy::module_name_repetitions)]
diff --git a/yt/src/comments/description.rs b/yt/src/comments/description.rs
index d22a40f..e8cb29d 100644
--- a/yt/src/comments/description.rs
+++ b/yt/src/comments/description.rs
@@ -17,7 +17,7 @@ use crate::{
 };
 
 use anyhow::{Result, bail};
-use yt_dlp::wrapper::info_json::InfoJson;
+use yt_dlp::{InfoJson, json_cast};
 
 pub async fn description(app: &App) -> Result<()> {
     let description = get(app).await?;
@@ -39,6 +39,8 @@ pub async fn get(app: &App) -> Result<String> {
     );
 
     Ok(info_json
-        .description
-        .unwrap_or("<No description>".to_owned()))
+        .get("description")
+        .map(|val| json_cast!(val, as_str))
+        .unwrap_or("<No description>")
+        .to_owned())
 }
diff --git a/yt/src/comments/mod.rs b/yt/src/comments/mod.rs
index daecf8d..876146d 100644
--- a/yt/src/comments/mod.rs
+++ b/yt/src/comments/mod.rs
@@ -11,11 +11,11 @@
 
 use std::mem;
 
-use anyhow::{Context, Result, bail};
-use comment::{CommentExt, Comments};
+use anyhow::{Result, bail};
+use comment::{Comment, CommentExt, Comments, Parent};
 use output::display_fmt_and_less;
 use regex::Regex;
-use yt_dlp::wrapper::info_json::{Comment, InfoJson, Parent};
+use yt_dlp::{InfoJson, json_cast};
 
 use crate::{
     app::App,
@@ -39,23 +39,25 @@ pub async fn get(app: &App) -> Result<Comments> {
             bail!("Could not find a currently playing video!");
         };
 
-    let mut info_json: InfoJson = get::video_info_json(&currently_playing_video)?.unreachable(
-        "A currently *playing* must be cached. And thus the info.json should be available",
+    let info_json: InfoJson = get::video_info_json(&currently_playing_video)?.unreachable(
+        "A currently *playing* video must be cached. And thus the info.json should be available",
     );
 
-    let base_comments = mem::take(&mut info_json.comments).with_context(|| {
-        format!(
+    let base_comments = if let Some(comments) = info_json.get("comments") {
+        json_cast!(comments, as_array)
+    } else {
+        bail!(
             "The video ('{}') does not have comments!",
             info_json
-                .title
-                .as_ref()
-                .unwrap_or(&("<No Title>".to_owned()))
+                .get("title")
+                .map(|val| json_cast!(val, as_str))
+                .unwrap_or("<No Title>")
         )
-    })?;
-    drop(info_json);
+    };
 
     let mut comments = Comments::new();
     for c in base_comments {
+        let c: Comment = serde_json::from_value(c.to_owned())?;
         if let Parent::Id(id) = &c.parent {
             comments.insert(&(id.clone()), CommentExt::from(c));
         } else {
diff --git a/yt/src/download/download_options.rs b/yt/src/download/download_options.rs
index 8f5a609..03c20ba 100644
--- a/yt/src/download/download_options.rs
+++ b/yt/src/download/download_options.rs
@@ -9,105 +9,110 @@
 // You should have received a copy of the License along with this program.
 // If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
 
+use anyhow::Context;
 use serde_json::{Value, json};
+use yt_dlp::{YoutubeDL, YoutubeDLOptions};
 
 use crate::{app::App, storage::video_database::YtDlpOptions};
 
-#[must_use]
-pub fn download_opts(app: &App, additional_opts: &YtDlpOptions) -> serde_json::Map<String, Value> {
-    match json!({
-      "extract_flat": "in_playlist",
-      "extractor_args": {
-        "youtube": {
-          "comment_sort": [
-            "top"
-          ],
-          "max_comments": [
-            "150",
-            "all",
-            "100"
-          ]
-        }
-      },
+use super::progress_hook::wrapped_progress_hook;
 
-      "prefer_free_formats": true,
-      "ffmpeg_location": env!("FFMPEG_LOCATION"),
-      "format": "bestvideo[height<=?1080]+bestaudio/best",
-      "fragment_retries": 10,
-      "getcomments": true,
-      "ignoreerrors": false,
-      "retries": 10,
-
-      "writeinfojson": true,
-      // NOTE: This results in a constant warning message.  <2025-01-04>
-      // "writeannotations": true,
-      "writesubtitles": true,
-      "writeautomaticsub": true,
-
-      "outtmpl": {
-        "default": app.config.paths.download_dir.join("%(channel)s/%(title)s.%(ext)s"),
-        "chapter": "%(title)s - %(section_number)03d %(section_title)s [%(id)s].%(ext)s"
-      },
-      "compat_opts": {},
-      "forceprint": {},
-      "print_to_file": {},
-      "windowsfilenames": false,
-      "restrictfilenames": false,
-      "trim_file_names": false,
-      "postprocessors": [
-        {
-          "api": "https://sponsor.ajay.app",
-          "categories": [
-            "interaction",
-            "intro",
-            "music_offtopic",
-            "sponsor",
-            "outro",
-            "poi_highlight",
-            "preview",
-            "selfpromo",
-            "filler",
-            "chapter"
-          ],
-          "key": "SponsorBlock",
-          "when": "after_filter"
-        },
-        {
-          "force_keyframes": false,
-          "key": "ModifyChapters",
-          "remove_chapters_patterns": [],
-          "remove_ranges": [],
-          "remove_sponsor_segments": [
-            "sponsor"
-          ],
-          "sponsorblock_chapter_title": "[SponsorBlock]: %(category_names)l"
-        },
-        {
-          "add_chapters": true,
-          "add_infojson": null,
-          "add_metadata": false,
-          "key": "FFmpegMetadata"
-        },
-        {
-          "key": "FFmpegConcat",
-          "only_multi_video": true,
-          "when": "playlist"
-        }
-      ]
-    }) {
-        Value::Object(mut obj) => {
-            obj.insert(
-                "subtitleslangs".to_owned(),
-                Value::Array(
-                    additional_opts
-                        .subtitle_langs
-                        .split(',')
-                        .map(|val| Value::String(val.to_owned()))
-                        .collect::<Vec<_>>(),
-                ),
-            );
-            obj
-        }
-        _ => unreachable!("This is an object"),
-    }
+pub fn download_opts(app: &App, additional_opts: &YtDlpOptions) -> anyhow::Result<YoutubeDL> {
+    YoutubeDLOptions::new()
+        .with_progress_hook(wrapped_progress_hook)
+        .set("extract_flat", "in_playlist")
+        .set(
+            "extractor_args",
+            json! {
+            {
+                "youtube": {
+                    "comment_sort": [ "top" ],
+                    "max_comments": [ "150", "all", "100" ]
+                }
+            }
+            },
+        )
+        //.set("cookiesfrombrowser", json! {("firefox", "me.google", None::<String>, "youtube_dlp")})
+        .set("prefer_free_formats", true)
+        .set("ffmpeg_location", env!("FFMPEG_LOCATION"))
+        .set("format", "bestvideo[height<=?1080]+bestaudio/best")
+        .set("fragment_retries", 10)
+        .set("getcomments", true)
+        .set("ignoreerrors", false)
+        .set("retries", 10)
+        .set("writeinfojson", true)
+        // NOTE: This results in a constant warning message.  <2025-01-04>
+        //.set("writeannotations", true)
+        .set("writesubtitles", true)
+        .set("writeautomaticsub", true)
+        .set(
+            "outtmpl",
+            json! {
+            {
+                "default": app.config.paths.download_dir.join("%(channel)s/%(title)s.%(ext)s"),
+                "chapter": "%(title)s - %(section_number)03d %(section_title)s [%(id)s].%(ext)s"
+            }
+            },
+        )
+        .set("compat_opts", json! {{}})
+        .set("forceprint", json! {{}})
+        .set("print_to_file", json! {{}})
+        .set("windowsfilenames", false)
+        .set("restrictfilenames", false)
+        .set("trim_file_names", false)
+        .set(
+            "postprocessors",
+            json! {
+            [
+                {
+                    "api": "https://sponsor.ajay.app",
+                    "categories": [
+                        "interaction",
+                        "intro",
+                        "music_offtopic",
+                        "sponsor",
+                        "outro",
+                        "poi_highlight",
+                        "preview",
+                        "selfpromo",
+                        "filler",
+                        "chapter"
+                    ],
+                    "key": "SponsorBlock",
+                    "when": "after_filter"
+                },
+                {
+                    "force_keyframes": false,
+                    "key": "ModifyChapters",
+                    "remove_chapters_patterns": [],
+                    "remove_ranges": [],
+                    "remove_sponsor_segments": [ "sponsor" ],
+                    "sponsorblock_chapter_title": "[SponsorBlock]: %(category_names)l"
+                },
+                {
+                    "add_chapters": true,
+                    "add_infojson": null,
+                    "add_metadata": false,
+                    "key": "FFmpegMetadata"
+                },
+                {
+                    "key": "FFmpegConcat",
+                    "only_multi_video": true,
+                    "when": "playlist"
+                }
+            ]
+            },
+        )
+        .set(
+            "subtitleslangs",
+            Value::Array(
+                additional_opts
+                    .subtitle_langs
+                    .split(',')
+                    .map(|val| Value::String(val.to_owned()))
+                    .collect::<Vec<_>>(),
+            ),
+        )
+        .build()
+        .context("Failed to instanciate download yt_dlp")
 }
diff --git a/yt/src/download/mod.rs b/yt/src/download/mod.rs
index 871e869..110bf55 100644
--- a/yt/src/download/mod.rs
+++ b/yt/src/download/mod.rs
@@ -29,9 +29,11 @@ use bytes::Bytes;
 use futures::{FutureExt, future::BoxFuture};
 use log::{debug, error, info, warn};
 use tokio::{fs, task::JoinHandle, time};
+use yt_dlp::{json_cast, json_get};
 
 #[allow(clippy::module_name_repetitions)]
 pub mod download_options;
+pub mod progress_hook;
 
 #[derive(Debug)]
 #[allow(clippy::module_name_repetitions)]
@@ -299,24 +301,25 @@ impl Downloader {
             let add_opts = YtDlpOptions {
                 subtitle_langs: String::new(),
             };
-            let opts = &download_opts(app, &add_opts);
+            let yt_dlp = download_opts(app, &add_opts)?;
 
-            let result =
-                yt_dlp::extract_info(opts, &video.url, false, true).with_context(|| {
+            let result = yt_dlp
+                .extract_info(&video.url, false, true)
+                .with_context(|| {
                     format!("Failed to extract video information: '{}'", video.title)
                 })?;
 
-            let size = if let Some(val) = result.filesize {
-                val
-            } else if let Some(val) = result.filesize_approx {
-                val
-            } else if result.duration.is_some() && result.tbr.is_some() {
+            let size = if let Some(val) = result.get("filesize") {
+                json_cast!(val, as_u64)
+            } else if let Some(val) = result.get("filesize_approx") {
+                json_cast!(val, as_u64)
+            } else if result.get("duration").is_some() && result.get("tbr").is_some() {
                 #[allow(clippy::cast_sign_loss, clippy::cast_possible_truncation)]
-                let duration = result.duration.expect("Is some").ceil() as u64;
+                let duration = json_get!(result, "duration", as_f64).ceil() as u64;
 
                 // TODO: yt_dlp gets this from the format
                 #[allow(clippy::cast_sign_loss, clippy::cast_possible_truncation)]
-                let tbr = result.tbr.expect("Is Some").ceil() as u64;
+                let tbr = json_get!(result, "tbr", as_f64).ceil() as u64;
 
                 duration * tbr * (1000 / 8)
             } else {
@@ -341,8 +344,10 @@ impl Downloader {
         debug!("Download started: {}", &video.title);
 
         let addional_opts = get_video_yt_dlp_opts(app, &video.extractor_hash).await?;
+        let yt_dlp = download_opts(app, &addional_opts)?;
 
-        let result = yt_dlp::download(&[video.url.clone()], &download_opts(app, &addional_opts))
+        let result = yt_dlp
+            .download(&[video.url.to_owned()])
             .with_context(|| format!("Failed to download video: '{}'", video.title))?;
 
         assert_eq!(result.len(), 1);
diff --git a/yt/src/download/progress_hook.rs b/yt/src/download/progress_hook.rs
new file mode 100644
index 0000000..65156e7
--- /dev/null
+++ b/yt/src/download/progress_hook.rs
@@ -0,0 +1,190 @@
+use std::{
+    io::{Write, stderr},
+    process,
+};
+
+use bytes::Bytes;
+use log::{Level, log_enabled};
+use yt_dlp::mk_python_function;
+
+use crate::select::selection_file::duration::MaybeDuration;
+
+// #[allow(clippy::too_many_lines)]
+// #[allow(clippy::missing_panics_doc)]
+// #[allow(clippy::items_after_statements)]
+// #[allow(
+//     clippy::cast_possible_truncation,
+//     clippy::cast_sign_loss,
+//     clippy::cast_precision_loss
+// )]
+pub fn progress_hook(
+    input: serde_json::Map<String, serde_json::Value>,
+) -> Result<(), std::io::Error> {
+    // Only render the progress line when the log-level is below Debug; otherwise
+    // the early return keeps us from overwriting the debug messages.
+    if log_enabled!(Level::Debug) {
+        return Ok(());
+    }
+
+    // ANSI ESCAPE CODES Wrappers {{{
+    // see: https://en.wikipedia.org/wiki/ANSI_escape_code#Control_Sequence_Introducer_commands
+    const CSI: &str = "\x1b[";
+    fn clear_whole_line() {
+        eprint!("{CSI}2K");
+    }
+    fn move_to_col(x: usize) {
+        eprint!("{CSI}{x}G");
+    }
+    // }}}
+
+    macro_rules! get {
+        (@interrogate $item:ident, $type_fun:ident, $get_fun:ident, $name:expr) => {{
+            let a = $item.get($name).expect(concat!(
+                "The field '",
+                stringify!($name),
+                "' should exist."
+            ));
+
+            if a.$type_fun() {
+                a.$get_fun().expect(
+                    "The should have been checked in the if guard, so unpacking here is fine",
+                )
+            } else {
+                panic!(
+                    "Value {} => \n{}\n is not of type: {}",
+                    $name,
+                    a,
+                    stringify!($type_fun)
+                );
+            }
+        }};
+
+        ($type_fun:ident, $get_fun:ident, $name1:expr, $name2:expr) => {{
+            let a = get! {@interrogate input, is_object, as_object, $name1};
+            let b = get! {@interrogate a, $type_fun, $get_fun, $name2};
+            b
+        }};
+
+        ($type_fun:ident, $get_fun:ident, $name:expr) => {{
+            get! {@interrogate input, $type_fun, $get_fun, $name}
+        }};
+    }
+
+    macro_rules! default_get {
+        (@interrogate $item:ident, $default:expr, $get_fun:ident, $name:expr) => {{
+            let a = if let Some(field) = $item.get($name) {
+                field.$get_fun().unwrap_or($default)
+            } else {
+                $default
+            };
+            a
+        }};
+
+        ($get_fun:ident, $default:expr, $name1:expr, $name2:expr) => {{
+            let a = get! {@interrogate input, is_object, as_object, $name1};
+            let b = default_get! {@interrogate a, $default, $get_fun, $name2};
+            b
+        }};
+
+        ($get_fun:ident, $default:expr, $name:expr) => {{
+            default_get! {@interrogate input, $default, $get_fun, $name}
+        }};
+    }
+
+    macro_rules! c {
+        ($color:expr, $format:expr) => {
+            format!("\x1b[{}m{}\x1b[0m", $color, $format)
+        };
+    }
+
+    fn format_bytes(bytes: u64) -> String {
+        let bytes = Bytes::new(bytes);
+        bytes.to_string()
+    }
+
+    fn format_speed(speed: f64) -> String {
+        #[allow(clippy::cast_possible_truncation, clippy::cast_sign_loss)]
+        let bytes = Bytes::new(speed.floor() as u64);
+        format!("{bytes}/s")
+    }
+
+    let get_title = || -> String {
+        match get! {is_string, as_str, "info_dict", "ext"} {
+            "vtt" => {
+                format!(
+                    "Subtitles ({})",
+                    default_get! {as_str, "<No Subtitle Language>", "info_dict", "name"}
+                )
+            }
+            "webm" | "mp4" | "mp3" | "m4a" => {
+                default_get! { as_str, "<No title>", "info_dict", "title"}.to_owned()
+            }
+            other => panic!("The extension '{other}' is not yet implemented"),
+        }
+    };
+
+    match get! {is_string, as_str, "status"} {
+        "downloading" => {
+            let elapsed = default_get! {as_f64, 0.0f64, "elapsed"};
+            let eta = default_get! {as_f64, 0.0, "eta"};
+            let speed = default_get! {as_f64, 0.0, "speed"};
+
+            let downloaded_bytes = get! {is_u64, as_u64, "downloaded_bytes"};
+            let (total_bytes, bytes_is_estimate): (u64, &'static str) = {
+                let total_bytes = default_get!(as_u64, 0, "total_bytes");
+                if total_bytes == 0 {
+                    let maybe_estimate = default_get!(as_u64, 0, "total_bytes_estimate");
+
+                    if maybe_estimate == 0 {
+                        // The download speed should be in bytes per second and the eta in seconds.
+                        // Thus multiplying them gets us the raw bytes (which were estimated by `yt_dlp`, from their `info.json`)
+                        let bytes_still_needed = (speed * eta).ceil() as u64;
+
+                        (downloaded_bytes + bytes_still_needed, "~")
+                    } else {
+                        (maybe_estimate, "~")
+                    }
+                } else {
+                    (total_bytes, "")
+                }
+            };
+            let percent: f64 = {
+                if total_bytes == 0 {
+                    100.0
+                } else {
+                    (downloaded_bytes as f64 / total_bytes as f64) * 100.0
+                }
+            };
+
+            clear_whole_line();
+            move_to_col(1);
+
+            eprint!(
+                "'{}' [{}/{} at {}] -> [{} of {}{} {}] ",
+                c!("34;1", get_title()),
+                c!("33;1", MaybeDuration::from_secs_f64(elapsed)),
+                c!("33;1", MaybeDuration::from_secs_f64(eta)),
+                c!("32;1", format_speed(speed)),
+                c!("31;1", format_bytes(downloaded_bytes)),
+                c!("31;1", bytes_is_estimate),
+                c!("31;1", format_bytes(total_bytes)),
+                c!("36;1", format!("{:.02}%", percent))
+            );
+            stderr().flush()?;
+        }
+        "finished" => {
+            eprintln!("-> Finished downloading.");
+        }
+        "error" => {
+            // TODO: This should probably return an Err. But I'm not so sure where the error would
+            // bubble up to (i.e., who would catch it) <2025-01-21>
+            eprintln!("-> Error while downloading: {}", get_title());
+            process::exit(1);
+        }
+        other => unreachable!("'{other}' should not be a valid state!"),
+    };
+
+    Ok(())
+}
+
+mk_python_function!(progress_hook, wrapped_progress_hook);
diff --git a/yt/src/main.rs b/yt/src/main.rs
index ffb3e14..413dc5e 100644
--- a/yt/src/main.rs
+++ b/yt/src/main.rs
@@ -13,16 +13,16 @@
 // to print it anyways.
 #![allow(clippy::missing_errors_doc)]
 
-use std::{fs, sync::Arc};
+use std::sync::Arc;
 
 use anyhow::{Context, Result, bail};
 use app::App;
 use bytes::Bytes;
 use cache::{invalidate, maintain};
 use clap::Parser;
-use cli::{CacheCommand, CheckCommand, SelectCommand, SubscriptionCommand, VideosCommand};
+use cli::{CacheCommand, SelectCommand, SubscriptionCommand, VideosCommand};
 use config::Config;
-use log::info;
+use log::{error, info};
 use select::cmds::handle_select_cmd;
 use storage::video_database::get::video_by_hash;
 use tokio::{
@@ -30,7 +30,6 @@ use tokio::{
     io::{BufReader, stdin},
     task::JoinHandle,
 };
-use yt_dlp::wrapper::info_json::InfoJson;
 
 use crate::{cli::Command, storage::subscriptions};
 
@@ -200,7 +199,7 @@ async fn main() -> Result<()> {
                     subscribe::import(&app, BufReader::new(f), force).await?;
                 } else {
                     subscribe::import(&app, BufReader::new(stdin()), force).await?;
-                };
+                }
             }
         },
 
@@ -215,17 +214,6 @@ async fn main() -> Result<()> {
             CacheCommand::Maintain { all } => maintain(&app, all).await?,
         },
 
-        Command::Check { command } => match command {
-            CheckCommand::InfoJson { path } => {
-                let string = fs::read_to_string(&path)
-                    .with_context(|| format!("Failed to read '{}' to string!", path.display()))?;
-
-                drop(
-                    serde_json::from_str::<InfoJson>(&string)
-                        .context("Failed to deserialize value")?,
-                );
-            }
-        },
         Command::Comments {} => {
             comments::comments(&app).await?;
         }
diff --git a/yt/src/select/cmds/add.rs b/yt/src/select/cmds/add.rs
index 8b183f0..387b3a1 100644
--- a/yt/src/select/cmds/add.rs
+++ b/yt/src/select/cmds/add.rs
@@ -14,15 +14,13 @@ use crate::{
     storage::video_database::{
         self, extractor_hash::ExtractorHash, get::get_all_hashes, set::add_video,
     },
-    unreachable::Unreachable,
     update::video_entry_to_video,
 };
 
 use anyhow::{Context, Result, bail};
 use log::{error, warn};
-use serde_json::{Map, Value};
 use url::Url;
-use yt_dlp::wrapper::info_json::InfoType;
+use yt_dlp::{InfoJson, YoutubeDL, json_cast, json_get};
 
 #[allow(clippy::too_many_lines)]
 pub(super) async fn add(
@@ -32,16 +30,11 @@ pub(super) async fn add(
     stop: Option<usize>,
 ) -> Result<()> {
     for url in urls {
-        async fn process_and_add(
-            app: &App,
-            entry: yt_dlp::wrapper::info_json::InfoJson,
-            opts: &Map<String, Value>,
-        ) -> Result<()> {
-            let url = entry
-                .url
-                .unreachable("`yt_dlp` should guarantee that this is Some at this point");
-
-            let entry = yt_dlp::extract_info(opts, &url, false, true)
+        async fn process_and_add(app: &App, entry: InfoJson, yt_dlp: &YoutubeDL) -> Result<()> {
+            let url = json_get!(entry, "url", as_str).parse()?;
+
+            let entry = yt_dlp
+                .extract_info(&url, false, true)
                 .with_context(|| format!("Failed to fetch entry for url: '{url}'"))?;
 
             add_entry(app, entry).await?;
@@ -49,19 +42,13 @@ pub(super) async fn add(
             Ok(())
         }
 
-        async fn add_entry(app: &App, entry: yt_dlp::wrapper::info_json::InfoJson) -> Result<()> {
+        async fn add_entry(app: &App, entry: InfoJson) -> Result<()> {
             // We have to re-fetch all hashes every time, because a user could try to add the same
             // URL twice (for whatever reason.)
             let hashes = get_all_hashes(app)
                 .await
                 .context("Failed to fetch all video hashes")?;
-            let extractor_hash = blake3::hash(
-                entry
-                    .id
-                    .as_ref()
-                    .expect("This should be some at this point")
-                    .as_bytes(),
-            );
+            let extractor_hash = blake3::hash(json_get!(entry, "id", as_str).as_bytes());
             if hashes.contains(&extractor_hash) {
                 error!(
                     "Video '{}'{} is already in the database. Skipped adding it",
@@ -71,17 +58,17 @@ pub(super) async fn add(
                         .with_context(|| format!(
                             "Failed to format hash of video '{}' as short hash",
                             entry
-                                .url
-                                .map_or("<Unknown video Url>".to_owned(), |url| url.to_string())
+                                .get("url")
+                                .map_or("<Unknown video Url>".to_owned(), ToString::to_string)
                         ))?,
                     entry
-                        .title
+                        .get("title")
                         .map_or(String::new(), |title| format!(" ('{title}')"))
                 );
                 return Ok(());
             }
 
-            let video = video_entry_to_video(entry, None)?;
+            let video = video_entry_to_video(&entry, None)?;
             add_video(app, video.clone()).await?;
 
             println!("{}", &video.to_line_display(app).await?);
@@ -89,18 +76,19 @@ pub(super) async fn add(
             Ok(())
         }
 
-        let opts = download_opts(
+        let yt_dlp = download_opts(
             app,
             &video_database::YtDlpOptions {
                 subtitle_langs: String::new(),
             },
-        );
+        )?;
 
-        let entry = yt_dlp::extract_info(&opts, &url, false, true)
+        let entry = yt_dlp
+            .extract_info(&url, false, true)
             .with_context(|| format!("Failed to fetch entry for url: '{url}'"))?;
 
-        match entry._type {
-            Some(InfoType::Video) => {
+        match entry.get("_type").map(|val| json_cast!(val, as_str)) {
+            Some("Video") => {
                 add_entry(app, entry).await?;
                 if start.is_some() || stop.is_some() {
                     warn!(
@@ -108,13 +96,14 @@ pub(super) async fn add(
                     );
                 }
             }
-            Some(InfoType::Playlist) => {
-                if let Some(entries) = entry.entries {
+            Some("Playlist") => {
+                if let Some(entries) = entry.get("entries") {
+                    let entries = json_cast!(entries, as_array);
                     let start = start.unwrap_or(0);
                     let stop = stop.unwrap_or(entries.len() - 1);
 
-                    let mut respected_entries: Vec<_> = take_vector(entries, start, stop)
-                        .with_context(|| {
+                    let respected_entries =
+                        take_vector(entries, start, stop).with_context(|| {
                             format!(
                                 "Failed to take entries starting at: {start} and ending with {stop}"
                             )
@@ -124,11 +113,23 @@ pub(super) async fn add(
                         warn!("No entries found, after applying your start/stop limits.");
                     } else {
                         // Pre-warm the cache
-                        process_and_add(app, respected_entries.remove(0), &opts).await?;
+                        process_and_add(
+                            app,
+                            json_cast!(respected_entries[0], as_object).to_owned(),
+                            &yt_dlp,
+                        )
+                        .await?;
+                        let respected_entries = &respected_entries[1..];
 
                         let futures: Vec<_> = respected_entries
-                            .into_iter()
-                            .map(|entry| process_and_add(app, entry, &opts))
+                            .iter()
+                            .map(|entry| {
+                                process_and_add(
+                                    app,
+                                    json_cast!(entry, as_object).to_owned(),
+                                    &yt_dlp,
+                                )
+                            })
                             .collect();
 
                         for fut in futures {
@@ -149,7 +150,7 @@ pub(super) async fn add(
     Ok(())
 }
 
-fn take_vector<T>(vector: Vec<T>, start: usize, stop: usize) -> Result<Vec<T>> {
+fn take_vector<T>(vector: &[T], start: usize, stop: usize) -> Result<&[T]> {
     let length = vector.len();
 
     if stop >= length {
@@ -158,26 +159,7 @@ fn take_vector<T>(vector: Vec<T>, start: usize, stop: usize) -> Result<Vec<T>> {
         );
     }
 
-    let end_skip = {
-        let base = length
-            .checked_sub(stop)
-            .unreachable("The check above should have caught this case.");
-
-        base.checked_sub(1)
-            .unreachable("The check above should have caught this case.")
-    };
-
-    // NOTE: We're using this instead of the `vector[start..=stop]` notation, because I wanted to
-    // avoid the needed allocation to turn the slice into a vector. <2025-01-04>
-
-    // TODO: This function could also just return a slice, but oh well.. <2025-01-04>
-    Ok(vector
-        .into_iter()
-        .skip(start)
-        .rev()
-        .skip(end_skip)
-        .rev()
-        .collect())
+    Ok(&vector[start..=stop])
 }
 
 #[cfg(test)]
@@ -188,7 +170,7 @@ mod test {
     fn test_vector_take() {
         let vec = vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10];
 
-        let new_vec = take_vector(vec, 2, 8).unwrap();
+        let new_vec = take_vector(&vec, 2, 8).unwrap();
 
         assert_eq!(new_vec, vec![2, 3, 4, 5, 6, 7, 8]);
     }
@@ -197,13 +179,13 @@ mod test {
     fn test_vector_take_overflow() {
         let vec = vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10];
 
-        assert!(take_vector(vec, 0, 12).is_err());
+        assert!(take_vector(&vec, 0, 12).is_err());
     }
 
     #[test]
     fn test_vector_take_equal() {
         let vec = vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10];
 
-        assert!(take_vector(vec, 0, 11).is_err());
+        assert!(take_vector(&vec, 0, 11).is_err());
     }
 }
diff --git a/yt/src/storage/subscriptions.rs b/yt/src/storage/subscriptions.rs
index 37b57fc..6c0d08a 100644
--- a/yt/src/storage/subscriptions.rs
+++ b/yt/src/storage/subscriptions.rs
@@ -15,10 +15,9 @@ use std::collections::HashMap;
 
 use anyhow::Result;
 use log::debug;
-use serde_json::{Value, json};
 use sqlx::query;
 use url::Url;
-use yt_dlp::wrapper::info_json::InfoType;
+use yt_dlp::YoutubeDLOptions;
 
 use crate::{app::App, unreachable::Unreachable};
 
@@ -39,21 +38,19 @@ impl Subscription {
 }
 
 /// Check whether an URL could be used as a subscription URL
-pub async fn check_url(url: &Url) -> Result<bool> {
-    let Value::Object(yt_opts) = json!( {
-        "playliststart": 1,
-        "playlistend": 10,
-        "noplaylist": false,
-        "extract_flat": "in_playlist",
-    }) else {
-        unreachable!("This is hardcoded");
-    };
-
-    let info = yt_dlp::extract_info(&yt_opts, url, false, false)?;
+pub async fn check_url(url: Url) -> Result<bool> {
+    let yt_dlp = YoutubeDLOptions::new()
+        .set("playliststart", 1)
+        .set("playlistend", 10)
+        .set("noplaylist", false)
+        .set("extract_flat", "in_playlist")
+        .build()?;
+
+    let info = yt_dlp.extract_info(&url, false, false)?;
 
     debug!("{:#?}", info);
 
-    Ok(info._type == Some(InfoType::Playlist))
+    Ok(info.get("_type") == Some(&serde_json::Value::String("Playlist".to_owned())))
 }
 
 #[derive(Default, Debug)]
diff --git a/yt/src/storage/video_database/get/mod.rs b/yt/src/storage/video_database/get/mod.rs
index a1871e2..0456cd3 100644
--- a/yt/src/storage/video_database/get/mod.rs
+++ b/yt/src/storage/video_database/get/mod.rs
@@ -18,7 +18,7 @@ use anyhow::{Context, Result, bail};
 use blake3::Hash;
 use log::{debug, trace};
 use sqlx::query;
-use yt_dlp::wrapper::info_json::InfoJson;
+use yt_dlp::InfoJson;
 
 use crate::{
     app::App,
diff --git a/yt/src/subscribe/mod.rs b/yt/src/subscribe/mod.rs
index d77e2bc..e6a5f51 100644
--- a/yt/src/subscribe/mod.rs
+++ b/yt/src/subscribe/mod.rs
@@ -14,10 +14,9 @@ use std::str::FromStr;
 use anyhow::{Context, Result, bail};
 use futures::FutureExt;
 use log::warn;
-use serde_json::{Value, json};
 use tokio::io::{AsyncBufRead, AsyncBufReadExt};
 use url::Url;
-use yt_dlp::wrapper::info_json::InfoType;
+use yt_dlp::{YoutubeDLOptions, json_get};
 
 use crate::{
     app::App,
@@ -149,19 +148,17 @@ async fn actual_subscribe(app: &App, name: Option<String>, url: Url) -> Result<(
     let name = if let Some(name) = name {
         name
     } else {
-        let Value::Object(yt_opts) = json!( {
-            "playliststart": 1,
-            "playlistend": 10,
-            "noplaylist": false,
-            "extract_flat": "in_playlist",
-        }) else {
-            unreachable!("This is hardcoded")
-        };
-
-        let info = yt_dlp::extract_info(&yt_opts, &url, false, false)?;
-
-        if info._type == Some(InfoType::Playlist) {
-            info.title.expect("This should be some for a playlist")
+        let yt_dlp = YoutubeDLOptions::new()
+            .set("playliststart", 1)
+            .set("playlistend", 10)
+            .set("noplaylist", false)
+            .set("extract_flat", "in_playlist")
+            .build()?;
+
+        let info = yt_dlp.extract_info(&url, false, false)?;
+
+        if info.get("_type") == Some(&serde_json::Value::String("Playlist".to_owned())) {
+            json_get!(info, "title", as_str).to_owned()
         } else {
             bail!("The url ('{}') does not represent a playlist!", &url)
         }
diff --git a/yt/src/update/mod.rs b/yt/src/update/mod.rs
index 7efe0da..f0b1e2c 100644
--- a/yt/src/update/mod.rs
+++ b/yt/src/update/mod.rs
@@ -15,7 +15,7 @@ use anyhow::{Context, Ok, Result};
 use chrono::{DateTime, Utc};
 use log::{info, warn};
 use url::Url;
-use yt_dlp::{unsmuggle_url, wrapper::info_json::InfoJson};
+use yt_dlp::{InfoJson, json_cast, json_get};
 
 use crate::{
     app::App,
@@ -72,19 +72,7 @@ pub async fn update(
 }
 
 #[allow(clippy::too_many_lines)]
-pub fn video_entry_to_video(entry: InfoJson, sub: Option<&Subscription>) -> Result<Video> {
-    macro_rules! unwrap_option {
-        ($option:expr) => {
-            match $option {
-                Some(x) => x,
-                None => anyhow::bail!(concat!(
-                    "Expected a value, but '",
-                    stringify!($option),
-                    "' is None!"
-                )),
-            }
-        };
-    }
+pub fn video_entry_to_video(entry: &InfoJson, sub: Option<&Subscription>) -> Result<Video> {
     fn fmt_context(date: &str, extended: Option<&str>) -> String {
         let f = format!(
             "Failed to parse the `upload_date` of the entry ('{date}'). \
@@ -97,7 +85,9 @@ pub fn video_entry_to_video(entry: InfoJson, sub: Option<&Subscription>) -> Resu
         }
     }
 
-    let publish_date = if let Some(date) = &entry.upload_date {
+    let publish_date = if let Some(date) = &entry.get("upload_date") {
+        let date = json_cast!(date, as_str);
+
         let year: u32 = date
             .chars()
             .take(4)
@@ -113,7 +103,7 @@ pub fn video_entry_to_video(entry: InfoJson, sub: Option<&Subscription>) -> Resu
             .with_context(|| fmt_context(date, None))?;
         let day: u32 = date
             .chars()
-            .skip(6)
+            .skip(4 + 2)
             .take(2)
             .collect::<String>()
             .parse()
@@ -128,42 +118,59 @@ pub fn video_entry_to_video(entry: InfoJson, sub: Option<&Subscription>) -> Resu
     } else {
         warn!(
             "The video '{}' lacks it's upload date!",
-            unwrap_option!(&entry.title)
+            json_get!(entry, "title", as_str)
         );
         None
     };
 
-    let thumbnail_url = match (&entry.thumbnails, &entry.thumbnail) {
+    let thumbnail_url = match (&entry.get("thumbnails"), &entry.get("thumbnail")) {
         (None, None) => None,
-        (None, Some(thumbnail)) => Some(thumbnail.to_owned()),
+        (None, Some(thumbnail)) => Some(Url::from_str(json_cast!(thumbnail, as_str))?),
 
         // TODO: The algorithm is not exactly the best <2024-05-28>
-        (Some(thumbnails), None) => thumbnails.first().map(|thumbnail| thumbnail.url.clone()),
-        (Some(_), Some(thumnail)) => Some(thumnail.to_owned()),
+        (Some(thumbnails), None) => {
+            if let Some(thumbnail) = json_cast!(thumbnails, as_array).first() {
+                Some(Url::from_str(json_get!(
+                    json_cast!(thumbnail, as_object),
+                    "url",
+                    as_str
+                ))?)
+            } else {
+                None
+            }
+        }
+        (Some(_), Some(thumnail)) => Some(Url::from_str(json_cast!(thumnail, as_str))?),
     };
 
     let url = {
-        let smug_url: Url = unwrap_option!(entry.webpage_url.clone());
-        unsmuggle_url(&smug_url)?
+        let smug_url: Url = json_get!(entry, "webpage_url", as_str).parse()?;
+        // unsmuggle_url(&smug_url)?
+        smug_url
     };
 
-    let extractor_hash = blake3::hash(unwrap_option!(entry.id).as_bytes());
+    let extractor_hash = blake3::hash(json_get!(entry, "id", as_str).as_bytes());
 
     let subscription_name = if let Some(sub) = sub {
         Some(sub.name.clone())
-    } else if let Some(uploader) = entry.uploader {
-        if entry.webpage_url_domain == Some("youtube.com".to_owned()) {
+    } else if let Some(uploader) = entry.get("uploader") {
+        if entry.get("webpage_url_domain")
+            == Some(&serde_json::Value::String("youtube.com".to_owned()))
+        {
             Some(format!("{uploader} - Videos"))
         } else {
-            Some(uploader.clone())
+            Some(json_cast!(uploader, as_str).to_owned())
         }
     } else {
         None
     };
 
     let video = Video {
-        description: entry.description.clone(),
-        duration: MaybeDuration::from_maybe_secs_f64(entry.duration),
+        description: entry
+            .get("description")
+            .map(|val| json_cast!(val, as_str).to_owned()),
+        duration: MaybeDuration::from_maybe_secs_f64(
+            entry.get("duration").map(|val| json_cast!(val, as_f64)),
+        ),
         extractor_hash: ExtractorHash::from_hash(extractor_hash),
         last_status_change: TimeStamp::from_now(),
         parent_subscription_name: subscription_name,
@@ -171,7 +178,7 @@ pub fn video_entry_to_video(entry: InfoJson, sub: Option<&Subscription>) -> Resu
         publish_date: publish_date.map(TimeStamp::from_secs),
         status: VideoStatus::Pick,
         thumbnail_url,
-        title: unwrap_option!(entry.title.clone()),
+        title: json_get!(entry, "title", as_str).to_owned(),
         url,
         watch_progress: Duration::default(),
     };
@@ -180,7 +187,7 @@ pub fn video_entry_to_video(entry: InfoJson, sub: Option<&Subscription>) -> Resu
 
 async fn process_subscription(app: &App, sub: &Subscription, entry: InfoJson) -> Result<()> {
     let video =
-        video_entry_to_video(entry, Some(sub)).context("Failed to parse search entry as Video")?;
+        video_entry_to_video(&entry, Some(sub)).context("Failed to parse search entry as Video")?;
 
     add_video(app, video.clone())
         .await
diff --git a/yt/src/update/updater.rs b/yt/src/update/updater.rs
index fe96da3..900fba7 100644
--- a/yt/src/update/updater.rs
+++ b/yt/src/update/updater.rs
@@ -18,7 +18,7 @@ use futures::{
 };
 use log::{Level, debug, error, log_enabled};
 use serde_json::json;
-use yt_dlp::{error::YtDlpError, process_ie_result, wrapper::info_json::InfoJson};
+use yt_dlp::{InfoJson, YoutubeDLOptions, json_cast, json_get};
 
 use crate::{app::App, storage::subscriptions::Subscription};
 
@@ -71,6 +71,7 @@ impl<'a> Updater<'a> {
         &self,
         sub: &'a Subscription,
     ) -> Result<Vec<(&'a Subscription, InfoJson)>> {
+        // TODO(@bpeetz): Deduplicate with the progress_hook. <2025-06-13>
         // ANSI ESCAPE CODES Wrappers {{{
         // see: https://en.wikipedia.org/wiki/ANSI_escape_code#Control_Sequence_Introducer_commands
         const CSI: &str = "\x1b[";
@@ -88,15 +89,18 @@ impl<'a> Updater<'a> {
         // }
         // }}}
 
-        let json = json! {
-            {
-                "playliststart": 1,
-                "playlistend": self.max_backlog,
-                "noplaylist": false,
-                "extractor_args": {"youtubetab": {"approximate_date": [""]}},
-            }
-        };
-        let yt_dlp_opts = json.as_object().expect("This is hardcoded");
+        let yt_dlp = YoutubeDLOptions::new()
+            .set("playliststart", 1)
+            .set("playlistend", self.max_backlog)
+            .set("noplaylist", false)
+            .set(
+                "extractor_args",
+                json! {{"youtubetab": {"approximate_date": [""]}}},
+            )
+            // TODO: This also removes unlisted and other stuff. Find a good way to remove the
+            // members-only videos from the feed. <2025-04-17>
+            .set("match-filter", "availability=public")
+            .build()?;
 
         if !log_enabled!(Level::Debug) {
             clear_whole_line();
@@ -106,64 +110,71 @@ impl<'a> Updater<'a> {
             stderr().flush()?;
         }
 
-        let info = yt_dlp::extract_info(yt_dlp_opts, &sub.url, false, false)
+        let info = yt_dlp
+            .extract_info(&sub.url, false, false)
             .with_context(|| format!("Failed to get playlist '{}'.", sub.name))?;
 
-        let entries = info.entries.unwrap_or(vec![]);
+        let empty = vec![];
+        let entries = info
+            .get("entries")
+            .map_or(&empty, |val| json_cast!(val, as_array));
+
         let valid_entries: Vec<(&Subscription, InfoJson)> = entries
-            .into_iter()
+            .iter()
             .take(self.max_backlog)
             .filter_map(|entry| -> Option<(&Subscription, InfoJson)> {
-                let id = entry.id.as_ref().expect("Should exist?");
+                let id = json_get!(entry, "id", as_str);
                 let extractor_hash = blake3::hash(id.as_bytes());
                 if self.hashes.contains(&extractor_hash) {
-                    debug!(
-                        "Skipping entry, as it is already present: '{}'",
-                        extractor_hash
-                    );
+                    debug!("Skipping entry, as it is already present: '{extractor_hash}'",);
                     None
                 } else {
-                    Some((sub, entry))
+                    Some((sub, json_cast!(entry, as_object).to_owned()))
                 }
             })
             .collect();
 
-        let processed_entries = {
-            let base: Result<Vec<(&Subscription, InfoJson)>, YtDlpError> =
-                stream::iter(valid_entries)
-                    .map(|(sub, entry)| async move {
-                        match process_ie_result(yt_dlp_opts, entry, false) {
-                            Ok(output) => Ok((sub, output)),
-                            Err(err) => Err(err),
-                        }
-                    })
-                    .buffer_unordered(100)
-                    .try_collect()
-                    .await;
-            match base {
-                Ok(ok) => ok,
+        let processed_entries: Vec<(&Subscription, InfoJson)> = stream::iter(valid_entries)
+            .map(
+                async |(sub, entry)| match yt_dlp.process_ie_result(entry, false) {
+                    Ok(output) => Ok((sub, output)),
+                    Err(err) => Err(err),
+                },
+            )
+            .buffer_unordered(100)
+            .collect::<Vec<_>>()
+            .await
+            .into_iter()
+            // Don't fail the whole update, if one of the entries fails to fetch.
+            .filter_map(|base| match base {
+                Ok(ok) => Some(ok),
                 Err(err) => {
-                    if let YtDlpError::PythonError { error, kind } = &err {
-                        if kind.as_str() == "<class 'yt_dlp.utils.DownloadError'>"
-                            && error.to_string().as_str().contains(
-                                "Join this channel to get access to members-only content ",
-                            )
-                        {
-                            vec![]
-                        } else {
-                            let error_string = error.to_string();
-                            let error = error_string
-                                .strip_prefix("DownloadError: \u{1b}[0;31mERROR:\u{1b}[0m ")
-                                .expect("This prefix should exists");
-                            error!("{error}");
-                            vec![]
-                        }
-                    } else {
-                        Err(err).context("Failed to process new entries.")?
-                    }
+                    // TODO(@bpeetz): Add this <2025-06-13>
+                    // if let YtDlpError::PythonError { error, kind } = &err {
+                    //     if kind.as_str() == "<class 'yt_dlp.utils.DownloadError'>"
+                    //         && error.to_string().as_str().contains(
+                    //             "Join this channel to get access to members-only content ",
+                    //         )
+                    //     {
+                    //         // Hide this error
+                    //     } else {
+                    //         let error_string = error.to_string();
+                    //         let error = error_string
+                    //             .strip_prefix("DownloadError: \u{1b}[0;31mERROR:\u{1b}[0m ")
+                    //             .expect("This prefix should exists");
+                    //         error!("{error}");
+                    //     }
+                    //     return None;
+                    // }
+
+                    // TODO(@bpeetz): Ideally, we _would_ actually exit on unexpected errors, but
+                    // this is fine for now.  <2025-06-13>
+                    // Some(Err(err).context("Failed to process new entries."))
+                    error!("While processing entry: {err}");
+                    None
                 }
-            }
-        };
+            })
+            .collect();
 
         Ok(processed_entries)
     }