about summary refs log tree commit diff stats
path: root/crates
diff options
context:
space:
mode:
Diffstat (limited to '')
-rw-r--r--crates/fmt/Cargo.toml2
-rw-r--r--crates/libmpv2/examples/opengl.rs17
-rw-r--r--crates/libmpv2/libmpv2-sys/Cargo.toml2
-rw-r--r--crates/libmpv2/src/lib.rs2
-rw-r--r--crates/libmpv2/src/mpv/events.rs50
-rw-r--r--crates/libmpv2/src/mpv/protocol.rs127
-rw-r--r--crates/libmpv2/src/mpv/render.rs48
-rw-r--r--crates/libmpv2/src/tests.rs24
-rw-r--r--crates/yt/Cargo.toml (renamed from yt/Cargo.toml)18
-rw-r--r--crates/yt/src/ansi_escape_codes.rs26
-rw-r--r--crates/yt/src/app.rs (renamed from yt/src/app.rs)0
-rw-r--r--crates/yt/src/cache/mod.rs (renamed from yt/src/cache/mod.rs)0
-rw-r--r--crates/yt/src/cli.rs (renamed from yt/src/cli.rs)20
-rw-r--r--crates/yt/src/comments/comment.rs152
-rw-r--r--crates/yt/src/comments/description.rs (renamed from yt/src/comments/description.rs)8
-rw-r--r--crates/yt/src/comments/display.rs (renamed from yt/src/comments/display.rs)0
-rw-r--r--crates/yt/src/comments/mod.rs (renamed from yt/src/comments/mod.rs)26
-rw-r--r--crates/yt/src/comments/output.rs (renamed from yt/src/comments/output.rs)0
-rw-r--r--crates/yt/src/config/default.rs (renamed from yt/src/config/default.rs)6
-rw-r--r--crates/yt/src/config/definitions.rs (renamed from yt/src/config/definitions.rs)0
-rw-r--r--crates/yt/src/config/file_system.rs (renamed from yt/src/config/file_system.rs)0
-rw-r--r--crates/yt/src/config/mod.rs (renamed from yt/src/config/mod.rs)0
-rw-r--r--crates/yt/src/constants.rs (renamed from yt/src/constants.rs)0
-rw-r--r--crates/yt/src/download/download_options.rs118
-rw-r--r--crates/yt/src/download/mod.rs (renamed from yt/src/download/mod.rs)31
-rw-r--r--crates/yt/src/download/progress_hook.rs188
-rw-r--r--crates/yt/src/main.rs (renamed from yt/src/main.rs)33
-rw-r--r--crates/yt/src/select/cmds/add.rs (renamed from yt/src/select/cmds/add.rs)111
-rw-r--r--crates/yt/src/select/cmds/mod.rs (renamed from yt/src/select/cmds/mod.rs)13
-rw-r--r--crates/yt/src/select/mod.rs (renamed from yt/src/select/mod.rs)15
-rw-r--r--crates/yt/src/select/selection_file/duration.rs (renamed from yt/src/select/selection_file/duration.rs)0
-rw-r--r--crates/yt/src/select/selection_file/help.str (renamed from yt/src/select/selection_file/help.str)0
-rw-r--r--crates/yt/src/select/selection_file/help.str.license (renamed from yt/src/select/selection_file/help.str.license)0
-rw-r--r--crates/yt/src/select/selection_file/mod.rs (renamed from yt/src/select/selection_file/mod.rs)0
-rw-r--r--crates/yt/src/status/mod.rs (renamed from yt/src/status/mod.rs)11
-rw-r--r--crates/yt/src/storage/migrate/mod.rs (renamed from yt/src/storage/migrate/mod.rs)189
-rw-r--r--crates/yt/src/storage/migrate/sql/0_Empty_to_Zero.sql (renamed from yt/src/storage/migrate/sql/00_empty_to_zero.sql)0
-rw-r--r--crates/yt/src/storage/migrate/sql/1_Zero_to_One.sql (renamed from yt/src/storage/migrate/sql/01_zero_to_one.sql)0
-rw-r--r--crates/yt/src/storage/migrate/sql/2_One_to_Two.sql (renamed from yt/src/storage/migrate/sql/02_one_to_two.sql)0
-rw-r--r--crates/yt/src/storage/migrate/sql/3_Two_to_Three.sql85
-rw-r--r--crates/yt/src/storage/mod.rs (renamed from yt/src/storage/mod.rs)2
-rw-r--r--crates/yt/src/storage/subscriptions.rs (renamed from yt/src/storage/subscriptions.rs)25
-rw-r--r--crates/yt/src/storage/video_database/downloader.rs (renamed from yt/src/storage/video_database/downloader.rs)0
-rw-r--r--crates/yt/src/storage/video_database/extractor_hash.rs (renamed from yt/src/storage/video_database/extractor_hash.rs)0
-rw-r--r--crates/yt/src/storage/video_database/get/mod.rs (renamed from yt/src/storage/video_database/get/mod.rs)8
-rw-r--r--crates/yt/src/storage/video_database/get/playlist/iterator.rs (renamed from yt/src/storage/video_database/get/playlist/iterator.rs)0
-rw-r--r--crates/yt/src/storage/video_database/get/playlist/mod.rs (renamed from yt/src/storage/video_database/get/playlist/mod.rs)0
-rw-r--r--crates/yt/src/storage/video_database/mod.rs (renamed from yt/src/storage/video_database/mod.rs)0
-rw-r--r--crates/yt/src/storage/video_database/notify.rs (renamed from yt/src/storage/video_database/notify.rs)0
-rw-r--r--crates/yt/src/storage/video_database/set/mod.rs (renamed from yt/src/storage/video_database/set/mod.rs)60
-rw-r--r--crates/yt/src/storage/video_database/set/playlist.rs (renamed from yt/src/storage/video_database/set/playlist.rs)36
-rw-r--r--crates/yt/src/subscribe/mod.rs (renamed from yt/src/subscribe/mod.rs)31
-rw-r--r--crates/yt/src/unreachable.rs (renamed from yt/src/unreachable.rs)0
-rw-r--r--crates/yt/src/update/mod.rs (renamed from yt/src/update/mod.rs)69
-rw-r--r--crates/yt/src/update/updater.rs167
-rw-r--r--crates/yt/src/version/mod.rs (renamed from yt/src/version/mod.rs)0
-rw-r--r--crates/yt/src/videos/display/format_video.rs (renamed from yt/src/videos/display/format_video.rs)0
-rw-r--r--crates/yt/src/videos/display/mod.rs (renamed from yt/src/videos/display/mod.rs)0
-rw-r--r--crates/yt/src/videos/mod.rs (renamed from yt/src/videos/mod.rs)0
-rw-r--r--crates/yt/src/watch/mod.rs (renamed from yt/src/watch/mod.rs)18
-rw-r--r--crates/yt/src/watch/playlist.rs (renamed from yt/src/watch/playlist.rs)12
-rw-r--r--crates/yt/src/watch/playlist_handler/client_messages/mod.rs (renamed from yt/src/watch/playlist_handler/client_messages/mod.rs)0
-rw-r--r--crates/yt/src/watch/playlist_handler/mod.rs (renamed from yt/src/watch/playlist_handler/mod.rs)29
-rw-r--r--crates/yt_dlp/.cargo/config.toml12
-rw-r--r--crates/yt_dlp/Cargo.toml13
-rw-r--r--crates/yt_dlp/src/duration.rs78
-rw-r--r--crates/yt_dlp/src/error.rs68
-rw-r--r--crates/yt_dlp/src/lib.rs956
-rw-r--r--crates/yt_dlp/src/logging.rs148
-rw-r--r--crates/yt_dlp/src/progress_hook.rs41
-rw-r--r--crates/yt_dlp/src/python_json_decode_failed.error_msg5
-rw-r--r--crates/yt_dlp/src/python_json_decode_failed.error_msg.license9
-rw-r--r--crates/yt_dlp/src/tests.rs89
-rw-r--r--crates/yt_dlp/src/wrapper/info_json.rs824
-rw-r--r--crates/yt_dlp/src/wrapper/mod.rs12
-rw-r--r--crates/yt_dlp/src/wrapper/yt_dlp_options.rs62
76 files changed, 1878 insertions, 2218 deletions
diff --git a/crates/fmt/Cargo.toml b/crates/fmt/Cargo.toml
index 7f82a09..f3cf4ad 100644
--- a/crates/fmt/Cargo.toml
+++ b/crates/fmt/Cargo.toml
@@ -24,7 +24,7 @@ publish = false
 path = "src/fmt.rs"
 
 [dependencies]
-unicode-width = "0.2.0"
+unicode-width = "0.2.1"
 
 [lints]
 workspace = true
diff --git a/crates/libmpv2/examples/opengl.rs b/crates/libmpv2/examples/opengl.rs
index 8eb9647..9f595aa 100644
--- a/crates/libmpv2/examples/opengl.rs
+++ b/crates/libmpv2/examples/opengl.rs
@@ -38,13 +38,16 @@ fn main() {
         Ok(())
     })
     .unwrap();
-    let mut render_context = RenderContext::new(unsafe { mpv.ctx.as_mut() }, vec![
-        RenderParam::ApiType(RenderParamApiType::OpenGl),
-        RenderParam::InitParams(OpenGLInitParams {
-            get_proc_address,
-            ctx: video,
-        }),
-    ])
+    let mut render_context = RenderContext::new(
+        unsafe { mpv.ctx.as_mut() },
+        vec![
+            RenderParam::ApiType(RenderParamApiType::OpenGl),
+            RenderParam::InitParams(OpenGLInitParams {
+                get_proc_address,
+                ctx: video,
+            }),
+        ],
+    )
     .expect("Failed creating render context");
 
     event_subsystem
diff --git a/crates/libmpv2/libmpv2-sys/Cargo.toml b/crates/libmpv2/libmpv2-sys/Cargo.toml
index b0514b8..96141d3 100644
--- a/crates/libmpv2/libmpv2-sys/Cargo.toml
+++ b/crates/libmpv2/libmpv2-sys/Cargo.toml
@@ -23,4 +23,4 @@ rust-version.workspace = true
 publish = false
 
 [build-dependencies]
-bindgen = { version = "0.71.1" }
+bindgen = { version = "0.72.0" }
diff --git a/crates/libmpv2/src/lib.rs b/crates/libmpv2/src/lib.rs
index d47e620..f6c2103 100644
--- a/crates/libmpv2/src/lib.rs
+++ b/crates/libmpv2/src/lib.rs
@@ -35,7 +35,7 @@ use std::os::raw as ctype;
 pub const MPV_CLIENT_API_MAJOR: ctype::c_ulong = 2;
 pub const MPV_CLIENT_API_MINOR: ctype::c_ulong = 2;
 pub const MPV_CLIENT_API_VERSION: ctype::c_ulong =
-    MPV_CLIENT_API_MAJOR << 16 | MPV_CLIENT_API_MINOR;
+    (MPV_CLIENT_API_MAJOR << 16) | MPV_CLIENT_API_MINOR;
 
 mod mpv;
 #[cfg(test)]
diff --git a/crates/libmpv2/src/mpv/events.rs b/crates/libmpv2/src/mpv/events.rs
index e27da2c..f10ff6e 100644
--- a/crates/libmpv2/src/mpv/events.rs
+++ b/crates/libmpv2/src/mpv/events.rs
@@ -70,26 +70,28 @@ impl<'a> PropertyData<'a> {
     // SAFETY: meant to extract the data from an event property. See `mpv_event_property` in
     // `client.h`
     unsafe fn from_raw(format: MpvFormat, ptr: *mut ctype::c_void) -> Result<PropertyData<'a>> {
-        assert!(!ptr.is_null());
-        match format {
-            mpv_format::Flag => Ok(PropertyData::Flag(*(ptr as *mut bool))),
-            mpv_format::String => {
-                let char_ptr = *(ptr as *mut *mut ctype::c_char);
-                Ok(PropertyData::Str(mpv_cstr_to_str!(char_ptr)?))
-            }
-            mpv_format::OsdString => {
-                let char_ptr = *(ptr as *mut *mut ctype::c_char);
-                Ok(PropertyData::OsdStr(mpv_cstr_to_str!(char_ptr)?))
-            }
-            mpv_format::Double => Ok(PropertyData::Double(*(ptr as *mut f64))),
-            mpv_format::Int64 => Ok(PropertyData::Int64(*(ptr as *mut i64))),
-            mpv_format::Node => {
-                let sys_node = *(ptr as *mut libmpv2_sys::mpv_node);
-                let node = SysMpvNode::new(sys_node, false);
-                Ok(PropertyData::Node(node.value().unwrap()))
+        unsafe {
+            assert!(!ptr.is_null());
+            match format {
+                mpv_format::Flag => Ok(PropertyData::Flag(*(ptr as *mut bool))),
+                mpv_format::String => {
+                    let char_ptr = *(ptr as *mut *mut ctype::c_char);
+                    Ok(PropertyData::Str(mpv_cstr_to_str!(char_ptr)?))
+                }
+                mpv_format::OsdString => {
+                    let char_ptr = *(ptr as *mut *mut ctype::c_char);
+                    Ok(PropertyData::OsdStr(mpv_cstr_to_str!(char_ptr)?))
+                }
+                mpv_format::Double => Ok(PropertyData::Double(*(ptr as *mut f64))),
+                mpv_format::Int64 => Ok(PropertyData::Int64(*(ptr as *mut i64))),
+                mpv_format::Node => {
+                    let sys_node = *(ptr as *mut libmpv2_sys::mpv_node);
+                    let node = SysMpvNode::new(sys_node, false);
+                    Ok(PropertyData::Node(node.value().unwrap()))
+                }
+                mpv_format::None => unreachable!(),
+                _ => unimplemented!(),
             }
-            mpv_format::None => unreachable!(),
-            _ => unimplemented!(),
         }
     }
 }
@@ -146,11 +148,13 @@ pub enum Event<'a> {
 }
 
 unsafe extern "C" fn wu_wrapper<F: Fn() + Send + 'static>(ctx: *mut c_void) {
-    if ctx.is_null() {
-        panic!("ctx for wakeup wrapper is NULL");
-    }
+    unsafe {
+        if ctx.is_null() {
+            panic!("ctx for wakeup wrapper is NULL");
+        }
 
-    (*(ctx as *mut F))();
+        (*(ctx as *mut F))();
+    }
 }
 
 /// Context to listen to events.
diff --git a/crates/libmpv2/src/mpv/protocol.rs b/crates/libmpv2/src/mpv/protocol.rs
index ec840d8..ee33411 100644
--- a/crates/libmpv2/src/mpv/protocol.rs
+++ b/crates/libmpv2/src/mpv/protocol.rs
@@ -63,26 +63,28 @@ where
     T: RefUnwindSafe,
     U: RefUnwindSafe,
 {
-    let data = user_data as *mut ProtocolData<T, U>;
+    unsafe {
+        let data = user_data as *mut ProtocolData<T, U>;
 
-    (*info).cookie = user_data;
-    (*info).read_fn = Some(read_wrapper::<T, U>);
-    (*info).seek_fn = Some(seek_wrapper::<T, U>);
-    (*info).size_fn = Some(size_wrapper::<T, U>);
-    (*info).close_fn = Some(close_wrapper::<T, U>);
+        (*info).cookie = user_data;
+        (*info).read_fn = Some(read_wrapper::<T, U>);
+        (*info).seek_fn = Some(seek_wrapper::<T, U>);
+        (*info).size_fn = Some(size_wrapper::<T, U>);
+        (*info).close_fn = Some(close_wrapper::<T, U>);
 
-    let ret = panic::catch_unwind(|| {
-        let uri = mpv_cstr_to_str!(uri as *const _).unwrap();
-        ptr::write(
-            (*data).cookie,
-            ((*data).open_fn)(&mut (*data).user_data, uri),
-        );
-    });
+        let ret = panic::catch_unwind(|| {
+            let uri = mpv_cstr_to_str!(uri as *const _).unwrap();
+            ptr::write(
+                (*data).cookie,
+                ((*data).open_fn)(&mut (*data).user_data, uri),
+            );
+        });
 
-    if ret.is_ok() {
-        0
-    } else {
-        mpv_error::Generic as _
+        if ret.is_ok() {
+            0
+        } else {
+            mpv_error::Generic as _
+        }
     }
 }
 
@@ -95,13 +97,15 @@ where
     T: RefUnwindSafe,
     U: RefUnwindSafe,
 {
-    let data = cookie as *mut ProtocolData<T, U>;
+    unsafe {
+        let data = cookie as *mut ProtocolData<T, U>;
 
-    let ret = panic::catch_unwind(|| {
-        let slice = slice::from_raw_parts_mut(buf, nbytes as _);
-        ((*data).read_fn)(&mut *(*data).cookie, slice)
-    });
-    ret.unwrap_or(-1)
+        let ret = panic::catch_unwind(|| {
+            let slice = slice::from_raw_parts_mut(buf, nbytes as _);
+            ((*data).read_fn)(&mut *(*data).cookie, slice)
+        });
+        ret.unwrap_or(-1)
+    }
 }
 
 unsafe extern "C" fn seek_wrapper<T, U>(cookie: *mut ctype::c_void, offset: i64) -> i64
@@ -109,18 +113,21 @@ where
     T: RefUnwindSafe,
     U: RefUnwindSafe,
 {
-    let data = cookie as *mut ProtocolData<T, U>;
+    unsafe {
+        let data = cookie as *mut ProtocolData<T, U>;
 
-    if (*data).seek_fn.is_none() {
-        return mpv_error::Unsupported as _;
-    }
+        if (*data).seek_fn.is_none() {
+            return mpv_error::Unsupported as _;
+        }
 
-    let ret =
-        panic::catch_unwind(|| (*(*data).seek_fn.as_ref().unwrap())(&mut *(*data).cookie, offset));
-    if let Ok(ret) = ret {
-        ret
-    } else {
-        mpv_error::Generic as _
+        let ret = panic::catch_unwind(|| {
+            (*(*data).seek_fn.as_ref().unwrap())(&mut *(*data).cookie, offset)
+        });
+        if let Ok(ret) = ret {
+            ret
+        } else {
+            mpv_error::Generic as _
+        }
     }
 }
 
@@ -129,17 +136,20 @@ where
     T: RefUnwindSafe,
     U: RefUnwindSafe,
 {
-    let data = cookie as *mut ProtocolData<T, U>;
+    unsafe {
+        let data = cookie as *mut ProtocolData<T, U>;
 
-    if (*data).size_fn.is_none() {
-        return mpv_error::Unsupported as _;
-    }
+        if (*data).size_fn.is_none() {
+            return mpv_error::Unsupported as _;
+        }
 
-    let ret = panic::catch_unwind(|| (*(*data).size_fn.as_ref().unwrap())(&mut *(*data).cookie));
-    if let Ok(ret) = ret {
-        ret
-    } else {
-        mpv_error::Unsupported as _
+        let ret =
+            panic::catch_unwind(|| (*(*data).size_fn.as_ref().unwrap())(&mut *(*data).cookie));
+        if let Ok(ret) = ret {
+            ret
+        } else {
+            mpv_error::Unsupported as _
+        }
     }
 }
 
@@ -149,9 +159,11 @@ where
     T: RefUnwindSafe,
     U: RefUnwindSafe,
 {
-    let data = Box::from_raw(cookie as *mut ProtocolData<T, U>);
+    unsafe {
+        let data = Box::from_raw(cookie as *mut ProtocolData<T, U>);
 
-    panic::catch_unwind(|| (data.close_fn)(Box::from_raw(data.cookie)));
+        panic::catch_unwind(|| (data.close_fn)(Box::from_raw(data.cookie)));
+    }
 }
 
 struct ProtocolData<T, U> {
@@ -224,20 +236,23 @@ impl<T: RefUnwindSafe, U: RefUnwindSafe> Protocol<T, U> {
         seek_fn: Option<StreamSeek<T>>,
         size_fn: Option<StreamSize<T>>,
     ) -> Protocol<T, U> {
-        let c_layout = Layout::from_size_align(mem::size_of::<T>(), mem::align_of::<T>()).unwrap();
-        let cookie = alloc::alloc(c_layout) as *mut T;
-        let data = Box::into_raw(Box::new(ProtocolData {
-            cookie,
-            user_data,
+        unsafe {
+            let c_layout =
+                Layout::from_size_align(mem::size_of::<T>(), mem::align_of::<T>()).unwrap();
+            let cookie = alloc::alloc(c_layout) as *mut T;
+            let data = Box::into_raw(Box::new(ProtocolData {
+                cookie,
+                user_data,
 
-            open_fn,
-            close_fn,
-            read_fn,
-            seek_fn,
-            size_fn,
-        }));
+                open_fn,
+                close_fn,
+                read_fn,
+                seek_fn,
+                size_fn,
+            }));
 
-        Protocol { name, data }
+            Protocol { name, data }
+        }
     }
 
     fn register(&self, ctx: *mut libmpv2_sys::mpv_handle) -> Result<()> {
diff --git a/crates/libmpv2/src/mpv/render.rs b/crates/libmpv2/src/mpv/render.rs
index 6457048..02f70bb 100644
--- a/crates/libmpv2/src/mpv/render.rs
+++ b/crates/libmpv2/src/mpv/render.rs
@@ -125,26 +125,30 @@ impl<C> From<&RenderParam<C>> for u32 {
 }
 
 unsafe extern "C" fn gpa_wrapper<GLContext>(ctx: *mut c_void, name: *const i8) -> *mut c_void {
-    if ctx.is_null() {
-        panic!("ctx for get_proc_address wrapper is NULL");
-    }
+    unsafe {
+        if ctx.is_null() {
+            panic!("ctx for get_proc_address wrapper is NULL");
+        }
 
-    let params: *mut OpenGLInitParams<GLContext> = ctx as _;
-    let params = &*params;
-    (params.get_proc_address)(
-        &params.ctx,
-        CStr::from_ptr(name)
-            .to_str()
-            .expect("Could not convert function name to str"),
-    )
+        let params: *mut OpenGLInitParams<GLContext> = ctx as _;
+        let params = &*params;
+        (params.get_proc_address)(
+            &params.ctx,
+            CStr::from_ptr(name)
+                .to_str()
+                .expect("Could not convert function name to str"),
+        )
+    }
 }
 
 unsafe extern "C" fn ru_wrapper<F: Fn() + Send + 'static>(ctx: *mut c_void) {
-    if ctx.is_null() {
-        panic!("ctx for render_update wrapper is NULL");
-    }
+    unsafe {
+        if ctx.is_null() {
+            panic!("ctx for render_update wrapper is NULL");
+        }
 
-    (*(ctx as *mut F))();
+        (*(ctx as *mut F))();
+    }
 }
 
 impl<C> From<OpenGLInitParams<C>> for libmpv2_sys::mpv_opengl_init_params {
@@ -197,14 +201,18 @@ impl<C> From<RenderParam<C>> for libmpv2_sys::mpv_render_param {
 }
 
 unsafe fn free_void_data<T>(ptr: *mut c_void) {
-    drop(Box::<T>::from_raw(ptr as *mut T));
+    unsafe {
+        drop(Box::<T>::from_raw(ptr as *mut T));
+    }
 }
 
 unsafe fn free_init_params<C>(ptr: *mut c_void) {
-    let params = Box::from_raw(ptr as *mut libmpv2_sys::mpv_opengl_init_params);
-    drop(Box::from_raw(
-        params.get_proc_address_ctx as *mut OpenGLInitParams<C>,
-    ));
+    unsafe {
+        let params = Box::from_raw(ptr as *mut libmpv2_sys::mpv_opengl_init_params);
+        drop(Box::from_raw(
+            params.get_proc_address_ctx as *mut OpenGLInitParams<C>,
+        ));
+    }
 }
 
 impl RenderContext {
diff --git a/crates/libmpv2/src/tests.rs b/crates/libmpv2/src/tests.rs
index 6106eb2..68753fc 100644
--- a/crates/libmpv2/src/tests.rs
+++ b/crates/libmpv2/src/tests.rs
@@ -54,10 +54,10 @@ fn properties() {
         0.6,
         f64::round(subg * f64::powi(10.0, 4)) / f64::powi(10.0, 4)
     );
-    mpv.command("loadfile", &[
-        "test-data/speech_12kbps_mb.wav",
-        "append-play",
-    ])
+    mpv.command(
+        "loadfile",
+        &["test-data/speech_12kbps_mb.wav", "append-play"],
+    )
     .unwrap();
     thread::sleep(Duration::from_millis(250));
 
@@ -185,10 +185,10 @@ fn events() {
 fn node_map() {
     let mpv = Mpv::new().unwrap();
 
-    mpv.command("loadfile", &[
-        "test-data/speech_12kbps_mb.wav",
-        "append-play",
-    ])
+    mpv.command(
+        "loadfile",
+        &["test-data/speech_12kbps_mb.wav", "append-play"],
+    )
     .unwrap();
 
     thread::sleep(Duration::from_millis(250));
@@ -217,10 +217,10 @@ fn node_map() {
 fn node_array() -> Result<()> {
     let mpv = Mpv::new()?;
 
-    mpv.command("loadfile", &[
-        "test-data/speech_12kbps_mb.wav",
-        "append-play",
-    ])
+    mpv.command(
+        "loadfile",
+        &["test-data/speech_12kbps_mb.wav", "append-play"],
+    )
     .unwrap();
 
     thread::sleep(Duration::from_millis(250));
diff --git a/yt/Cargo.toml b/crates/yt/Cargo.toml
index 6f6e470..17d4016 100644
--- a/yt/Cargo.toml
+++ b/crates/yt/Cargo.toml
@@ -24,21 +24,21 @@ rust-version.workspace = true
 publish = false
 
 [dependencies]
-anyhow = "1.0.96"
-blake3 = "1.6.0"
-chrono = { version = "0.4.39", features = ["now"] }
+anyhow = "1.0.98"
+blake3 = "1.8.2"
+chrono = { version = "0.4.41", features = ["now"] }
 chrono-humanize = "0.2.3"
-clap = { version = "4.5.30", features = ["derive"] }
+clap = { version = "4.5.40", features = ["derive"] }
 futures = "0.3.31"
 nucleo-matcher = "0.3.1"
-owo-colors = "4.1.0"
+owo-colors = "4.2.1"
 regex = "1.11.1"
-sqlx = { version = "0.8.3", features = ["runtime-tokio", "sqlite"] }
+sqlx = { version = "0.8.6", features = ["runtime-tokio", "sqlite"] }
 stderrlog = "0.6.0"
-tempfile = "3.17.1"
-toml = "0.8.20"
+tempfile = "3.20.0"
+toml = "0.8.23"
 trinitry = { version = "0.2.2" }
-xdg = "2.5.2"
+xdg = "3.0.0"
 bytes.workspace = true
 libmpv2.workspace = true
 log.workspace = true
diff --git a/crates/yt/src/ansi_escape_codes.rs b/crates/yt/src/ansi_escape_codes.rs
new file mode 100644
index 0000000..ae1805d
--- /dev/null
+++ b/crates/yt/src/ansi_escape_codes.rs
@@ -0,0 +1,26 @@
+// see: https://en.wikipedia.org/wiki/ANSI_escape_code#Control_Sequence_Introducer_commands
+const CSI: &str = "\x1b[";
+pub fn erase_in_display_from_cursor() {
+    print!("{CSI}0J");
+}
+pub fn cursor_up(number: usize) {
+    // HACK(@bpeetz): The default is `1` and running this command with a
+    // number of `0` results in it using the default (i.e., `1`) <2025-03-25>
+    if number != 0 {
+        print!("{CSI}{number}A");
+    }
+}
+
+pub fn clear_whole_line() {
+    eprint!("{CSI}2K");
+}
+pub fn move_to_col(x: usize) {
+    eprint!("{CSI}{x}G");
+}
+
+pub fn hide_cursor() {
+    eprint!("{CSI}?25l");
+}
+pub fn show_cursor() {
+    eprint!("{CSI}?25h");
+}
diff --git a/yt/src/app.rs b/crates/yt/src/app.rs
index 15a9388..15a9388 100644
--- a/yt/src/app.rs
+++ b/crates/yt/src/app.rs
diff --git a/yt/src/cache/mod.rs b/crates/yt/src/cache/mod.rs
index 83d5ee0..83d5ee0 100644
--- a/yt/src/cache/mod.rs
+++ b/crates/yt/src/cache/mod.rs
diff --git a/yt/src/cli.rs b/crates/yt/src/cli.rs
index 037f45c..de7a5b8 100644
--- a/yt/src/cli.rs
+++ b/crates/yt/src/cli.rs
@@ -40,10 +40,6 @@ pub struct CliArgs {
     #[arg(long, short, action=ArgAction::SetTrue, default_value_t = false)]
     pub no_migrate_db: bool,
 
-    /// Increase message verbosity
-    #[arg(long="verbose", short = 'v', action = ArgAction::Count)]
-    pub verbosity: u8,
-
     /// Display colors [defaults to true, if the config file has no value]
     #[arg(long, short = 'C')]
     pub color: Option<bool>,
@@ -57,6 +53,10 @@ pub struct CliArgs {
     #[arg(long, short)]
     pub config_path: Option<PathBuf>,
 
+    /// Increase message verbosity
+    #[arg(long="verbose", short = 'v', action = ArgAction::Count)]
+    pub verbosity: u8,
+
     /// Silence all output
     #[arg(long, short = 'q')]
     pub quiet: bool,
@@ -103,12 +103,6 @@ pub enum Command {
     /// Show, the configuration options in effect
     Config {},
 
-    /// Perform various tests
-    Check {
-        #[command(subcommand)]
-        command: CheckCommand,
-    },
-
     /// Display the comments of the currently playing video
     Comments {},
     /// Display the description of the currently playing video
@@ -355,12 +349,6 @@ impl Default for SelectCommand {
     }
 }
 
-#[derive(Subcommand, Clone, Debug)]
-pub enum CheckCommand {
-    /// Check if the given `*.info.json` file is deserializable.
-    InfoJson { path: PathBuf },
-}
-
 #[derive(Subcommand, Clone, Copy, Debug)]
 pub enum CacheCommand {
     /// Invalidate all cache entries
diff --git a/crates/yt/src/comments/comment.rs b/crates/yt/src/comments/comment.rs
new file mode 100644
index 0000000..5bc939c
--- /dev/null
+++ b/crates/yt/src/comments/comment.rs
@@ -0,0 +1,152 @@
+// yt - A fully featured command line YouTube client
+//
+// Copyright (C) 2024 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+// Copyright (C) 2025 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+// SPDX-License-Identifier: GPL-3.0-or-later
+//
+// This file is part of Yt.
+//
+// You should have received a copy of the License along with this program.
+// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
+
+use serde::{Deserialize, Deserializer, Serialize};
+use url::Url;
+
+#[derive(Debug, Deserialize, Serialize, Clone, Eq, PartialEq, PartialOrd, Ord)]
+#[serde(from = "String")]
+#[serde(deny_unknown_fields)]
+pub enum Parent {
+    Root,
+    Id(String),
+}
+
+impl Parent {
+    #[must_use]
+    pub fn id(&self) -> Option<&str> {
+        if let Self::Id(id) = self {
+            Some(id)
+        } else {
+            None
+        }
+    }
+}
+
+impl From<String> for Parent {
+    fn from(value: String) -> Self {
+        if value == "root" {
+            Self::Root
+        } else {
+            Self::Id(value)
+        }
+    }
+}
+
+#[derive(Debug, Deserialize, Serialize, Clone, Eq, PartialEq, PartialOrd, Ord)]
+#[serde(from = "String")]
+#[serde(deny_unknown_fields)]
+pub struct Id {
+    pub id: String,
+}
+impl From<String> for Id {
+    fn from(value: String) -> Self {
+        Self {
+            // Take the last element if the string is split with dots, otherwise take the full id
+            id: value.split('.').last().unwrap_or(&value).to_owned(),
+        }
+    }
+}
+
+#[derive(Debug, Deserialize, Serialize, Clone, Eq, PartialEq, PartialOrd, Ord)]
+#[allow(clippy::struct_excessive_bools)]
+pub struct Comment {
+    pub id: Id,
+    pub text: String,
+    #[serde(default = "zero")]
+    pub like_count: u32,
+    pub is_pinned: bool,
+    pub author_id: String,
+    #[serde(default = "unknown")]
+    pub author: String,
+    pub author_is_verified: bool,
+    pub author_thumbnail: Url,
+    pub parent: Parent,
+    #[serde(deserialize_with = "edited_from_time_text", alias = "_time_text")]
+    pub edited: bool,
+    // Can't also be deserialized, as it's already used in 'edited'
+    // _time_text: String,
+    pub timestamp: i64,
+    pub author_url: Option<Url>,
+    pub author_is_uploader: bool,
+    pub is_favorited: bool,
+}
+
+fn unknown() -> String {
+    "<Unknown>".to_string()
+}
+fn zero() -> u32 {
+    0
+}
+fn edited_from_time_text<'de, D>(d: D) -> Result<bool, D::Error>
+where
+    D: Deserializer<'de>,
+{
+    let s = String::deserialize(d)?;
+    if s.contains(" (edited)") {
+        Ok(true)
+    } else {
+        Ok(false)
+    }
+}
+
+#[derive(Debug, Clone)]
+#[allow(clippy::module_name_repetitions)]
+pub struct CommentExt {
+    pub value: Comment,
+    pub replies: Vec<CommentExt>,
+}
+
+#[derive(Debug, Default)]
+pub struct Comments {
+    pub(super) vec: Vec<CommentExt>,
+}
+
+impl Comments {
+    pub fn new() -> Self {
+        Self::default()
+    }
+    pub fn push(&mut self, value: CommentExt) {
+        self.vec.push(value);
+    }
+    pub fn get_mut(&mut self, key: &str) -> Option<&mut CommentExt> {
+        self.vec.iter_mut().filter(|c| c.value.id.id == key).last()
+    }
+    pub fn insert(&mut self, key: &str, value: CommentExt) {
+        let parent = self
+            .vec
+            .iter_mut()
+            .filter(|c| c.value.id.id == key)
+            .last()
+            .expect("One of these should exist");
+        parent.push_reply(value);
+    }
+}
+impl CommentExt {
+    pub fn push_reply(&mut self, value: CommentExt) {
+        self.replies.push(value);
+    }
+    pub fn get_mut_reply(&mut self, key: &str) -> Option<&mut CommentExt> {
+        self.replies
+            .iter_mut()
+            .filter(|c| c.value.id.id == key)
+            .last()
+    }
+}
+
+impl From<Comment> for CommentExt {
+    fn from(value: Comment) -> Self {
+        Self {
+            replies: vec![],
+            value,
+        }
+    }
+}
diff --git a/yt/src/comments/description.rs b/crates/yt/src/comments/description.rs
index d22a40f..e8cb29d 100644
--- a/yt/src/comments/description.rs
+++ b/crates/yt/src/comments/description.rs
@@ -17,7 +17,7 @@ use crate::{
 };
 
 use anyhow::{Result, bail};
-use yt_dlp::wrapper::info_json::InfoJson;
+use yt_dlp::{InfoJson, json_cast};
 
 pub async fn description(app: &App) -> Result<()> {
     let description = get(app).await?;
@@ -39,6 +39,8 @@ pub async fn get(app: &App) -> Result<String> {
     );
 
     Ok(info_json
-        .description
-        .unwrap_or("<No description>".to_owned()))
+        .get("description")
+        .map(|val| json_cast!(val, as_str))
+        .unwrap_or("<No description>")
+        .to_owned())
 }
diff --git a/yt/src/comments/display.rs b/crates/yt/src/comments/display.rs
index 6166b2b..6166b2b 100644
--- a/yt/src/comments/display.rs
+++ b/crates/yt/src/comments/display.rs
diff --git a/yt/src/comments/mod.rs b/crates/yt/src/comments/mod.rs
index daecf8d..876146d 100644
--- a/yt/src/comments/mod.rs
+++ b/crates/yt/src/comments/mod.rs
@@ -11,11 +11,11 @@
 
 use std::mem;
 
-use anyhow::{Context, Result, bail};
-use comment::{CommentExt, Comments};
+use anyhow::{Result, bail};
+use comment::{Comment, CommentExt, Comments, Parent};
 use output::display_fmt_and_less;
 use regex::Regex;
-use yt_dlp::wrapper::info_json::{Comment, InfoJson, Parent};
+use yt_dlp::{InfoJson, json_cast};
 
 use crate::{
     app::App,
@@ -39,23 +39,25 @@ pub async fn get(app: &App) -> Result<Comments> {
             bail!("Could not find a currently playing video!");
         };
 
-    let mut info_json: InfoJson = get::video_info_json(&currently_playing_video)?.unreachable(
-        "A currently *playing* must be cached. And thus the info.json should be available",
+    let info_json: InfoJson = get::video_info_json(&currently_playing_video)?.unreachable(
+        "A currently *playing* video must be cached. And thus the info.json should be available",
     );
 
-    let base_comments = mem::take(&mut info_json.comments).with_context(|| {
-        format!(
+    let base_comments = if let Some(comments) = info_json.get("comments") {
+        json_cast!(comments, as_array)
+    } else {
+        bail!(
             "The video ('{}') does not have comments!",
             info_json
-                .title
-                .as_ref()
-                .unwrap_or(&("<No Title>".to_owned()))
+                .get("title")
+                .map(|val| json_cast!(val, as_str))
+                .unwrap_or("<No Title>")
         )
-    })?;
-    drop(info_json);
+    };
 
     let mut comments = Comments::new();
     for c in base_comments {
+        let c: Comment = serde_json::from_value(c.to_owned())?;
         if let Parent::Id(id) = &c.parent {
             comments.insert(&(id.clone()), CommentExt::from(c));
         } else {
diff --git a/yt/src/comments/output.rs b/crates/yt/src/comments/output.rs
index cb3a9c4..cb3a9c4 100644
--- a/yt/src/comments/output.rs
+++ b/crates/yt/src/comments/output.rs
diff --git a/yt/src/config/default.rs b/crates/yt/src/config/default.rs
index a1d327a..4ed643b 100644
--- a/yt/src/config/default.rs
+++ b/crates/yt/src/config/default.rs
@@ -14,19 +14,19 @@ use std::path::PathBuf;
 use anyhow::{Context, Result};
 
 fn get_runtime_path(name: &'static str) -> Result<PathBuf> {
-    let xdg_dirs = xdg::BaseDirectories::with_prefix(PREFIX)?;
+    let xdg_dirs = xdg::BaseDirectories::with_prefix(PREFIX);
     xdg_dirs
         .place_runtime_file(name)
         .with_context(|| format!("Failed to place runtime file: '{name}'"))
 }
 fn get_data_path(name: &'static str) -> Result<PathBuf> {
-    let xdg_dirs = xdg::BaseDirectories::with_prefix(PREFIX)?;
+    let xdg_dirs = xdg::BaseDirectories::with_prefix(PREFIX);
     xdg_dirs
         .place_data_file(name)
         .with_context(|| format!("Failed to place data file: '{name}'"))
 }
 fn get_config_path(name: &'static str) -> Result<PathBuf> {
-    let xdg_dirs = xdg::BaseDirectories::with_prefix(PREFIX)?;
+    let xdg_dirs = xdg::BaseDirectories::with_prefix(PREFIX);
     xdg_dirs
         .place_config_file(name)
         .with_context(|| format!("Failed to place config file: '{name}'"))
diff --git a/yt/src/config/definitions.rs b/crates/yt/src/config/definitions.rs
index ce8c0d4..ce8c0d4 100644
--- a/yt/src/config/definitions.rs
+++ b/crates/yt/src/config/definitions.rs
diff --git a/yt/src/config/file_system.rs b/crates/yt/src/config/file_system.rs
index 2463e9d..2463e9d 100644
--- a/yt/src/config/file_system.rs
+++ b/crates/yt/src/config/file_system.rs
diff --git a/yt/src/config/mod.rs b/crates/yt/src/config/mod.rs
index a10f7c2..a10f7c2 100644
--- a/yt/src/config/mod.rs
+++ b/crates/yt/src/config/mod.rs
diff --git a/yt/src/constants.rs b/crates/yt/src/constants.rs
index 0f5b918..0f5b918 100644
--- a/yt/src/constants.rs
+++ b/crates/yt/src/constants.rs
diff --git a/crates/yt/src/download/download_options.rs b/crates/yt/src/download/download_options.rs
new file mode 100644
index 0000000..03c20ba
--- /dev/null
+++ b/crates/yt/src/download/download_options.rs
@@ -0,0 +1,118 @@
+// yt - A fully featured command line YouTube client
+//
+// Copyright (C) 2024 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+// Copyright (C) 2025 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+// SPDX-License-Identifier: GPL-3.0-or-later
+//
+// This file is part of Yt.
+//
+// You should have received a copy of the License along with this program.
+// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
+
+use anyhow::Context;
+use serde_json::{Value, json};
+use yt_dlp::{YoutubeDL, YoutubeDLOptions};
+
+use crate::{app::App, storage::video_database::YtDlpOptions};
+
+use super::progress_hook::wrapped_progress_hook;
+
+pub fn download_opts(app: &App, additional_opts: &YtDlpOptions) -> anyhow::Result<YoutubeDL> {
+    YoutubeDLOptions::new()
+        .with_progress_hook(wrapped_progress_hook)
+        .set("extract_flat", "in_playlist")
+        .set(
+            "extractor_args",
+            json! {
+            {
+                "youtube": {
+                    "comment_sort": [ "top" ],
+                    "max_comments": [ "150", "all", "100" ]
+                }
+            }
+            },
+        )
+        //.set("cookiesfrombrowser", json! {("firefox", "me.google", None::<String>, "youtube_dlp")})
+        .set("prefer_free_formats", true)
+        .set("ffmpeg_location", env!("FFMPEG_LOCATION"))
+        .set("format", "bestvideo[height<=?1080]+bestaudio/best")
+        .set("fragment_retries", 10)
+        .set("getcomments", true)
+        .set("ignoreerrors", false)
+        .set("retries", 10)
+        .set("writeinfojson", true)
+        // NOTE: This results in a constant warning message.  <2025-01-04>
+        //.set("writeannotations", true)
+        .set("writesubtitles", true)
+        .set("writeautomaticsub", true)
+        .set(
+            "outtmpl",
+            json! {
+            {
+                "default": app.config.paths.download_dir.join("%(channel)s/%(title)s.%(ext)s"),
+                "chapter": "%(title)s - %(section_number)03d %(section_title)s [%(id)s].%(ext)s"
+            }
+            },
+        )
+        .set("compat_opts", json! {{}})
+        .set("forceprint", json! {{}})
+        .set("print_to_file", json! {{}})
+        .set("windowsfilenames", false)
+        .set("restrictfilenames", false)
+        .set("trim_file_names", false)
+        .set(
+            "postprocessors",
+            json! {
+            [
+                {
+                    "api": "https://sponsor.ajay.app",
+                    "categories": [
+                        "interaction",
+                        "intro",
+                        "music_offtopic",
+                        "sponsor",
+                        "outro",
+                        "poi_highlight",
+                        "preview",
+                        "selfpromo",
+                        "filler",
+                        "chapter"
+                    ],
+                    "key": "SponsorBlock",
+                    "when": "after_filter"
+                },
+                {
+                    "force_keyframes": false,
+                    "key": "ModifyChapters",
+                    "remove_chapters_patterns": [],
+                    "remove_ranges": [],
+                    "remove_sponsor_segments": [ "sponsor" ],
+                    "sponsorblock_chapter_title": "[SponsorBlock]: %(category_names)l"
+                },
+                {
+                    "add_chapters": true,
+                    "add_infojson": null,
+                    "add_metadata": false,
+                    "key": "FFmpegMetadata"
+                },
+                {
+                    "key": "FFmpegConcat",
+                    "only_multi_video": true,
+                    "when": "playlist"
+                }
+            ]
+            },
+        )
+        .set(
+            "subtitleslangs",
+            Value::Array(
+                additional_opts
+                    .subtitle_langs
+                    .split(',')
+                    .map(|val| Value::String(val.to_owned()))
+                    .collect::<Vec<_>>(),
+            ),
+        )
+        .build()
+        .context("Failed to instantiate download yt_dlp")
+}
diff --git a/yt/src/download/mod.rs b/crates/yt/src/download/mod.rs
index 984d400..110bf55 100644
--- a/yt/src/download/mod.rs
+++ b/crates/yt/src/download/mod.rs
@@ -29,9 +29,11 @@ use bytes::Bytes;
 use futures::{FutureExt, future::BoxFuture};
 use log::{debug, error, info, warn};
 use tokio::{fs, task::JoinHandle, time};
+use yt_dlp::{json_cast, json_get};
 
 #[allow(clippy::module_name_repetitions)]
 pub mod download_options;
+pub mod progress_hook;
 
 #[derive(Debug)]
 #[allow(clippy::module_name_repetitions)]
@@ -109,7 +111,7 @@ impl Downloader {
             }
         }
         let cache_allocation = Self::get_current_cache_allocation(app).await?;
-        let video_size = self.get_approx_video_size(app, next_video).await?;
+        let video_size = self.get_approx_video_size(app, next_video)?;
 
         if video_size >= max_cache_size {
             error!(
@@ -291,7 +293,7 @@ impl Downloader {
         dir_size(read_dir_result).await
     }
 
-    async fn get_approx_video_size(&mut self, app: &App, video: &Video) -> Result<u64> {
+    fn get_approx_video_size(&mut self, app: &App, video: &Video) -> Result<u64> {
         if let Some(value) = self.video_size_cache.get(&video.extractor_hash) {
             Ok(*value)
         } else {
@@ -299,25 +301,25 @@ impl Downloader {
             let add_opts = YtDlpOptions {
                 subtitle_langs: String::new(),
             };
-            let opts = &download_opts(app, &add_opts);
+            let yt_dlp = download_opts(app, &add_opts)?;
 
-            let result = yt_dlp::extract_info(opts, &video.url, false, true)
-                .await
+            let result = yt_dlp
+                .extract_info(&video.url, false, true)
                 .with_context(|| {
                     format!("Failed to extract video information: '{}'", video.title)
                 })?;
 
-            let size = if let Some(val) = result.filesize {
-                val
-            } else if let Some(val) = result.filesize_approx {
-                val
-            } else if result.duration.is_some() && result.tbr.is_some() {
+            let size = if let Some(val) = result.get("filesize") {
+                json_cast!(val, as_u64)
+            } else if let Some(val) = result.get("filesize_approx") {
+                json_cast!(val, as_u64)
+            } else if result.get("duration").is_some() && result.get("tbr").is_some() {
                 #[allow(clippy::cast_sign_loss, clippy::cast_possible_truncation)]
-                let duration = result.duration.expect("Is some").ceil() as u64;
+                let duration = json_get!(result, "duration", as_f64).ceil() as u64;
 
                 // TODO: yt_dlp gets this from the format
                 #[allow(clippy::cast_sign_loss, clippy::cast_possible_truncation)]
-                let tbr = result.tbr.expect("Is Some").ceil() as u64;
+                let tbr = json_get!(result, "tbr", as_f64).ceil() as u64;
 
                 duration * tbr * (1000 / 8)
             } else {
@@ -342,9 +344,10 @@ impl Downloader {
         debug!("Download started: {}", &video.title);
 
         let addional_opts = get_video_yt_dlp_opts(app, &video.extractor_hash).await?;
+        let yt_dlp = download_opts(app, &addional_opts)?;
 
-        let result = yt_dlp::download(&[video.url.clone()], &download_opts(app, &addional_opts))
-            .await
+        let result = yt_dlp
+            .download(&[video.url.to_owned()])
             .with_context(|| format!("Failed to download video: '{}'", video.title))?;
 
         assert_eq!(result.len(), 1);
diff --git a/crates/yt/src/download/progress_hook.rs b/crates/yt/src/download/progress_hook.rs
new file mode 100644
index 0000000..b75ec00
--- /dev/null
+++ b/crates/yt/src/download/progress_hook.rs
@@ -0,0 +1,188 @@
+use std::{
+    io::{Write, stderr},
+    process,
+};
+
+use bytes::Bytes;
+use log::{Level, log_enabled};
+use yt_dlp::mk_python_function;
+
+use crate::{
+    ansi_escape_codes::{clear_whole_line, move_to_col},
+    select::selection_file::duration::MaybeDuration,
+};
+
+/// # Panics
+/// If expectations fail.
+#[allow(clippy::too_many_lines, clippy::needless_pass_by_value)]
+pub fn progress_hook(
+    input: serde_json::Map<String, serde_json::Value>,
+) -> Result<(), std::io::Error> {
+    // Only render the progress display if the log level is below Debug (otherwise
+    // the progress line would overwrite/cover the debug messages).
+    if log_enabled!(Level::Debug) {
+        return Ok(());
+    }
+
+    macro_rules! get {
+        (@interrogate $item:ident, $type_fun:ident, $get_fun:ident, $name:expr) => {{
+            let a = $item.get($name).expect(concat!(
+                "The field '",
+                stringify!($name),
+                "' should exist."
+            ));
+
+            if a.$type_fun() {
+                a.$get_fun().expect(
+                    "This should have been checked in the if guard, so unpacking here is fine",
+                )
+            } else {
+                panic!(
+                    "Value {} => \n{}\n is not of type: {}",
+                    $name,
+                    a,
+                    stringify!($type_fun)
+                );
+            }
+        }};
+
+        ($type_fun:ident, $get_fun:ident, $name1:expr, $name2:expr) => {{
+            let a = get! {@interrogate input, is_object, as_object, $name1};
+            let b = get! {@interrogate a, $type_fun, $get_fun, $name2};
+            b
+        }};
+
+        ($type_fun:ident, $get_fun:ident, $name:expr) => {{
+            get! {@interrogate input, $type_fun, $get_fun, $name}
+        }};
+    }
+
+    macro_rules! default_get {
+        (@interrogate $item:ident, $default:expr, $get_fun:ident, $name:expr) => {{
+            let a = if let Some(field) = $item.get($name) {
+                field.$get_fun().unwrap_or($default)
+            } else {
+                $default
+            };
+            a
+        }};
+
+        ($get_fun:ident, $default:expr, $name1:expr, $name2:expr) => {{
+            let a = get! {@interrogate input, is_object, as_object, $name1};
+            let b = default_get! {@interrogate a, $default, $get_fun, $name2};
+            b
+        }};
+
+        ($get_fun:ident, $default:expr, $name:expr) => {{
+            default_get! {@interrogate input, $default, $get_fun, $name}
+        }};
+    }
+
+    macro_rules! c {
+        ($color:expr, $format:expr) => {
+            format!("\x1b[{}m{}\x1b[0m", $color, $format)
+        };
+    }
+
+    #[allow(clippy::items_after_statements)]
+    fn format_bytes(bytes: u64) -> String {
+        let bytes = Bytes::new(bytes);
+        bytes.to_string()
+    }
+
+    #[allow(clippy::items_after_statements)]
+    fn format_speed(speed: f64) -> String {
+        #[allow(clippy::cast_possible_truncation, clippy::cast_sign_loss)]
+        let bytes = Bytes::new(speed.floor() as u64);
+        format!("{bytes}/s")
+    }
+
+    let get_title = || -> String {
+        match get! {is_string, as_str, "info_dict", "ext"} {
+            "vtt" => {
+                format!(
+                    "Subtitles ({})",
+                    default_get! {as_str, "<No Subtitle Language>", "info_dict", "name"}
+                )
+            }
+            "webm" | "mp4" | "mp3" | "m4a" => {
+                default_get! { as_str, "<No title>", "info_dict", "title"}.to_owned()
+            }
+            other => panic!("The extension '{other}' is not yet implemented"),
+        }
+    };
+
+    match get! {is_string, as_str, "status"} {
+        "downloading" => {
+            let elapsed = default_get! {as_f64, 0.0f64, "elapsed"};
+            let eta = default_get! {as_f64, 0.0, "eta"};
+            let speed = default_get! {as_f64, 0.0, "speed"};
+
+            let downloaded_bytes = get! {is_u64, as_u64, "downloaded_bytes"};
+            let (total_bytes, bytes_is_estimate): (u64, &'static str) = {
+                let total_bytes = default_get!(as_u64, 0, "total_bytes");
+                if total_bytes == 0 {
+                    let maybe_estimate = default_get!(as_u64, 0, "total_bytes_estimate");
+
+                    #[allow(clippy::cast_possible_truncation, clippy::cast_sign_loss)]
+                    if maybe_estimate == 0 {
+                        // The download speed should be in bytes per second and the eta in seconds.
+                        // Thus multiplying them gets us the raw bytes (which were estimated by `yt_dlp`, from their `info.json`)
+                        let bytes_still_needed = (speed * eta).ceil() as u64;
+
+                        (downloaded_bytes + bytes_still_needed, "~")
+                    } else {
+                        (maybe_estimate, "~")
+                    }
+                } else {
+                    (total_bytes, "")
+                }
+            };
+
+            let percent: f64 = {
+                if total_bytes == 0 {
+                    100.0
+                } else {
+                    #[allow(
+                        clippy::cast_possible_truncation,
+                        clippy::cast_sign_loss,
+                        clippy::cast_precision_loss
+                    )]
+                    {
+                        (downloaded_bytes as f64 / total_bytes as f64) * 100.0
+                    }
+                }
+            };
+
+            clear_whole_line();
+            move_to_col(1);
+
+            eprint!(
+                "'{}' [{}/{} at {}] -> [{} of {}{} {}] ",
+                c!("34;1", get_title()),
+                c!("33;1", MaybeDuration::from_secs_f64(elapsed)),
+                c!("33;1", MaybeDuration::from_secs_f64(eta)),
+                c!("32;1", format_speed(speed)),
+                c!("31;1", format_bytes(downloaded_bytes)),
+                c!("31;1", bytes_is_estimate),
+                c!("31;1", format_bytes(total_bytes)),
+                c!("36;1", format!("{:.02}%", percent))
+            );
+            stderr().flush()?;
+        }
+        "finished" => {
+            eprintln!("-> Finished downloading.");
+        }
+        "error" => {
+            // TODO: This should probably return an Err. But I'm not so sure where the error would
+            // bubble up to (i.e., who would catch it) <2025-01-21>
+            eprintln!("-> Error while downloading: {}", get_title());
+            process::exit(1);
+        }
+        other => unreachable!("'{other}' should not be a valid state!"),
+    }
+
+    Ok(())
+}
+
+mk_python_function!(progress_hook, wrapped_progress_hook);
diff --git a/yt/src/main.rs b/crates/yt/src/main.rs
index ffb3e14..39f52f4 100644
--- a/yt/src/main.rs
+++ b/crates/yt/src/main.rs
@@ -13,16 +13,16 @@
 // to print it anyways.
 #![allow(clippy::missing_errors_doc)]
 
-use std::{fs, sync::Arc};
+use std::sync::Arc;
 
 use anyhow::{Context, Result, bail};
 use app::App;
 use bytes::Bytes;
 use cache::{invalidate, maintain};
 use clap::Parser;
-use cli::{CacheCommand, CheckCommand, SelectCommand, SubscriptionCommand, VideosCommand};
+use cli::{CacheCommand, SelectCommand, SubscriptionCommand, VideosCommand};
 use config::Config;
-use log::info;
+use log::{error, info};
 use select::cmds::handle_select_cmd;
 use storage::video_database::get::video_by_hash;
 use tokio::{
@@ -30,10 +30,10 @@ use tokio::{
     io::{BufReader, stdin},
     task::JoinHandle,
 };
-use yt_dlp::wrapper::info_json::InfoJson;
 
 use crate::{cli::Command, storage::subscriptions};
 
+pub mod ansi_escape_codes;
 pub mod app;
 pub mod cli;
 pub mod unreachable;
@@ -200,7 +200,7 @@ async fn main() -> Result<()> {
                     subscribe::import(&app, BufReader::new(f), force).await?;
                 } else {
                     subscribe::import(&app, BufReader::new(stdin()), force).await?;
-                };
+                }
             }
         },
 
@@ -215,17 +215,6 @@ async fn main() -> Result<()> {
             CacheCommand::Maintain { all } => maintain(&app, all).await?,
         },
 
-        Command::Check { command } => match command {
-            CheckCommand::InfoJson { path } => {
-                let string = fs::read_to_string(&path)
-                    .with_context(|| format!("Failed to read '{}' to string!", path.display()))?;
-
-                drop(
-                    serde_json::from_str::<InfoJson>(&string)
-                        .context("Failed to deserialize value")?,
-                );
-            }
-        },
         Command::Comments {} => {
             comments::comments(&app).await?;
         }
@@ -242,15 +231,17 @@ async fn dowa(arc_app: Arc<App>) -> Result<()> {
     info!("Max cache size: '{}'", max_cache_size);
 
     let arc_app_clone = Arc::clone(&arc_app);
-    let download: JoinHandle<Result<()>> = tokio::spawn(async move {
-        download::Downloader::new()
+    let download: JoinHandle<()> = tokio::spawn(async move {
+        let result = download::Downloader::new()
             .consume(arc_app_clone, max_cache_size.as_u64())
-            .await?;
+            .await;
 
-        Ok(())
+        if let Err(err) = result {
+            error!("Error from downloader: {err:?}");
+        }
     });
 
     watch::watch(arc_app).await?;
-    download.await??;
+    download.await?;
     Ok(())
 }
diff --git a/yt/src/select/cmds/add.rs b/crates/yt/src/select/cmds/add.rs
index da58ec2..387b3a1 100644
--- a/yt/src/select/cmds/add.rs
+++ b/crates/yt/src/select/cmds/add.rs
@@ -14,15 +14,13 @@ use crate::{
     storage::video_database::{
         self, extractor_hash::ExtractorHash, get::get_all_hashes, set::add_video,
     },
-    unreachable::Unreachable,
     update::video_entry_to_video,
 };
 
 use anyhow::{Context, Result, bail};
 use log::{error, warn};
-use serde_json::{Map, Value};
 use url::Url;
-use yt_dlp::wrapper::info_json::InfoType;
+use yt_dlp::{InfoJson, YoutubeDL, json_cast, json_get};
 
 #[allow(clippy::too_many_lines)]
 pub(super) async fn add(
@@ -32,17 +30,11 @@ pub(super) async fn add(
     stop: Option<usize>,
 ) -> Result<()> {
     for url in urls {
-        async fn process_and_add(
-            app: &App,
-            entry: yt_dlp::wrapper::info_json::InfoJson,
-            opts: &Map<String, Value>,
-        ) -> Result<()> {
-            let url = entry
-                .url
-                .unreachable("`yt_dlp` should guarantee that this is Some at this point");
-
-            let entry = yt_dlp::extract_info(opts, &url, false, true)
-                .await
+        async fn process_and_add(app: &App, entry: InfoJson, yt_dlp: &YoutubeDL) -> Result<()> {
+            let url = json_get!(entry, "url", as_str).parse()?;
+
+            let entry = yt_dlp
+                .extract_info(&url, false, true)
                 .with_context(|| format!("Failed to fetch entry for url: '{url}'"))?;
 
             add_entry(app, entry).await?;
@@ -50,19 +42,13 @@ pub(super) async fn add(
             Ok(())
         }
 
-        async fn add_entry(app: &App, entry: yt_dlp::wrapper::info_json::InfoJson) -> Result<()> {
+        async fn add_entry(app: &App, entry: InfoJson) -> Result<()> {
             // We have to re-fetch all hashes every time, because a user could try to add the same
             // URL twice (for whatever reason.)
             let hashes = get_all_hashes(app)
                 .await
                 .context("Failed to fetch all video hashes")?;
-            let extractor_hash = blake3::hash(
-                entry
-                    .id
-                    .as_ref()
-                    .expect("This should be some at this point")
-                    .as_bytes(),
-            );
+            let extractor_hash = blake3::hash(json_get!(entry, "id", as_str).as_bytes());
             if hashes.contains(&extractor_hash) {
                 error!(
                     "Video '{}'{} is already in the database. Skipped adding it",
@@ -72,17 +58,17 @@ pub(super) async fn add(
                         .with_context(|| format!(
                             "Failed to format hash of video '{}' as short hash",
                             entry
-                                .url
-                                .map_or("<Unknown video Url>".to_owned(), |url| url.to_string())
+                                .get("url")
+                                .map_or("<Unknown video Url>".to_owned(), ToString::to_string)
                         ))?,
                     entry
-                        .title
+                        .get("title")
                         .map_or(String::new(), |title| format!(" ('{title}')"))
                 );
                 return Ok(());
             }
 
-            let video = video_entry_to_video(entry, None)?;
+            let video = video_entry_to_video(&entry, None)?;
             add_video(app, video.clone()).await?;
 
             println!("{}", &video.to_line_display(app).await?);
@@ -90,16 +76,19 @@ pub(super) async fn add(
             Ok(())
         }
 
-        let opts = download_opts(app, &video_database::YtDlpOptions {
-            subtitle_langs: String::new(),
-        });
+        let yt_dlp = download_opts(
+            app,
+            &video_database::YtDlpOptions {
+                subtitle_langs: String::new(),
+            },
+        )?;
 
-        let entry = yt_dlp::extract_info(&opts, &url, false, true)
-            .await
+        let entry = yt_dlp
+            .extract_info(&url, false, true)
             .with_context(|| format!("Failed to fetch entry for url: '{url}'"))?;
 
-        match entry._type {
-            Some(InfoType::Video) => {
+        match entry.get("_type").map(|val| json_cast!(val, as_str)) {
+            Some("Video") => {
                 add_entry(app, entry).await?;
                 if start.is_some() || stop.is_some() {
                     warn!(
@@ -107,13 +96,14 @@ pub(super) async fn add(
                     );
                 }
             }
-            Some(InfoType::Playlist) => {
-                if let Some(entries) = entry.entries {
+            Some("Playlist") => {
+                if let Some(entries) = entry.get("entries") {
+                    let entries = json_cast!(entries, as_array);
                     let start = start.unwrap_or(0);
                     let stop = stop.unwrap_or(entries.len() - 1);
 
-                    let mut respected_entries: Vec<_> = take_vector(entries, start, stop)
-                        .with_context(|| {
+                    let respected_entries =
+                        take_vector(entries, start, stop).with_context(|| {
                             format!(
                                 "Failed to take entries starting at: {start} and ending with {stop}"
                             )
@@ -123,11 +113,23 @@ pub(super) async fn add(
                         warn!("No entries found, after applying your start/stop limits.");
                     } else {
                         // Pre-warm the cache
-                        process_and_add(app, respected_entries.remove(0), &opts).await?;
+                        process_and_add(
+                            app,
+                            json_cast!(respected_entries[0], as_object).to_owned(),
+                            &yt_dlp,
+                        )
+                        .await?;
+                        let respected_entries = &respected_entries[1..];
 
                         let futures: Vec<_> = respected_entries
-                            .into_iter()
-                            .map(|entry| process_and_add(app, entry, &opts))
+                            .iter()
+                            .map(|entry| {
+                                process_and_add(
+                                    app,
+                                    json_cast!(entry, as_object).to_owned(),
+                                    &yt_dlp,
+                                )
+                            })
                             .collect();
 
                         for fut in futures {
@@ -148,7 +150,7 @@ pub(super) async fn add(
     Ok(())
 }
 
-fn take_vector<T>(vector: Vec<T>, start: usize, stop: usize) -> Result<Vec<T>> {
+fn take_vector<T>(vector: &[T], start: usize, stop: usize) -> Result<&[T]> {
     let length = vector.len();
 
     if stop >= length {
@@ -157,26 +159,7 @@ fn take_vector<T>(vector: Vec<T>, start: usize, stop: usize) -> Result<Vec<T>> {
         );
     }
 
-    let end_skip = {
-        let base = length
-            .checked_sub(stop)
-            .unreachable("The check above should have caught this case.");
-
-        base.checked_sub(1)
-            .unreachable("The check above should have caught this case.")
-    };
-
-    // NOTE: We're using this instead of the `vector[start..=stop]` notation, because I wanted to
-    // avoid the needed allocation to turn the slice into a vector. <2025-01-04>
-
-    // TODO: This function could also just return a slice, but oh well.. <2025-01-04>
-    Ok(vector
-        .into_iter()
-        .skip(start)
-        .rev()
-        .skip(end_skip)
-        .rev()
-        .collect())
+    Ok(&vector[start..=stop])
 }
 
 #[cfg(test)]
@@ -187,7 +170,7 @@ mod test {
     fn test_vector_take() {
         let vec = vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10];
 
-        let new_vec = take_vector(vec, 2, 8).unwrap();
+        let new_vec = take_vector(&vec, 2, 8).unwrap();
 
         assert_eq!(new_vec, vec![2, 3, 4, 5, 6, 7, 8]);
     }
@@ -196,13 +179,13 @@ mod test {
     fn test_vector_take_overflow() {
         let vec = vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10];
 
-        assert!(take_vector(vec, 0, 12).is_err());
+        assert!(take_vector(&vec, 0, 12).is_err());
     }
 
     #[test]
     fn test_vector_take_equal() {
         let vec = vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10];
 
-        assert!(take_vector(vec, 0, 11).is_err());
+        assert!(take_vector(&vec, 0, 11).is_err());
     }
 }
diff --git a/yt/src/select/cmds/mod.rs b/crates/yt/src/select/cmds/mod.rs
index ea41f99..aabcd3d 100644
--- a/yt/src/select/cmds/mod.rs
+++ b/crates/yt/src/select/cmds/mod.rs
@@ -51,10 +51,15 @@ pub async fn handle_select_cmd(
                 is_focused,
             } = video.status
             {
-                handle_status_change(app, shared, line_number, VideoStatus::Cached {
-                    cache_path,
-                    is_focused,
-                })
+                handle_status_change(
+                    app,
+                    shared,
+                    line_number,
+                    VideoStatus::Cached {
+                        cache_path,
+                        is_focused,
+                    },
+                )
                 .await?;
             } else {
                 handle_status_change(app, shared, line_number, VideoStatus::Watch).await?;
diff --git a/yt/src/select/mod.rs b/crates/yt/src/select/mod.rs
index 54db65c..8db9ae3 100644
--- a/yt/src/select/mod.rs
+++ b/crates/yt/src/select/mod.rs
@@ -53,12 +53,15 @@ pub async fn select(app: &App, done: bool, use_last_selection: bool) -> Result<(
         let matching_videos = if done {
             get::videos(app, VideoStatusMarker::ALL).await?
         } else {
-            get::videos(app, &[
-                VideoStatusMarker::Pick,
-                //
-                VideoStatusMarker::Watch,
-                VideoStatusMarker::Cached,
-            ])
+            get::videos(
+                app,
+                &[
+                    VideoStatusMarker::Pick,
+                    //
+                    VideoStatusMarker::Watch,
+                    VideoStatusMarker::Cached,
+                ],
+            )
             .await?
         };
 
diff --git a/yt/src/select/selection_file/duration.rs b/crates/yt/src/select/selection_file/duration.rs
index 77c4fc5..77c4fc5 100644
--- a/yt/src/select/selection_file/duration.rs
+++ b/crates/yt/src/select/selection_file/duration.rs
diff --git a/yt/src/select/selection_file/help.str b/crates/yt/src/select/selection_file/help.str
index e3cc347..e3cc347 100644
--- a/yt/src/select/selection_file/help.str
+++ b/crates/yt/src/select/selection_file/help.str
diff --git a/yt/src/select/selection_file/help.str.license b/crates/yt/src/select/selection_file/help.str.license
index a0e196c..a0e196c 100644
--- a/yt/src/select/selection_file/help.str.license
+++ b/crates/yt/src/select/selection_file/help.str.license
diff --git a/yt/src/select/selection_file/mod.rs b/crates/yt/src/select/selection_file/mod.rs
index abd26c4..abd26c4 100644
--- a/yt/src/select/selection_file/mod.rs
+++ b/crates/yt/src/select/selection_file/mod.rs
diff --git a/yt/src/status/mod.rs b/crates/yt/src/status/mod.rs
index bc45cfb..18bef7d 100644
--- a/yt/src/status/mod.rs
+++ b/crates/yt/src/status/mod.rs
@@ -87,6 +87,15 @@ pub async fn show(app: &App) -> Result<()> {
         }
     };
 
+    let watch_rate: f64 = {
+        fn to_f64(input: usize) -> f64 {
+            f64::from(u32::try_from(input).expect("This should never exceed u32::MAX"))
+        }
+
+        let count = to_f64(watched_videos_len) / (to_f64(drop_videos_len) + to_f64(dropped_videos_len));
+        count * 100.0
+    };
+
     let cache_usage_raw = Downloader::get_current_cache_allocation(app)
         .await
         .context("Failed to get current cache allocation")?;
@@ -97,7 +106,7 @@ Picked   Videos: {picked_videos_len}
 
 Watch    Videos: {watch_videos_len}
 Cached   Videos: {cached_videos_len}
-Watched  Videos: {watched_videos_len}
+Watched  Videos: {watched_videos_len} (watch rate: {watch_rate:.2} %)
 
 Drop     Videos: {drop_videos_len}
 Dropped  Videos: {dropped_videos_len}
diff --git a/yt/src/storage/migrate/mod.rs b/crates/yt/src/storage/migrate/mod.rs
index badeb6f..953d079 100644
--- a/yt/src/storage/migrate/mod.rs
+++ b/crates/yt/src/storage/migrate/mod.rs
@@ -21,6 +21,59 @@ use sqlx::{Sqlite, SqlitePool, Transaction, query};
 
 use crate::app::App;
 
+macro_rules! make_upgrade {
+    ($app:expr, $old_version:expr, $new_version:expr, $sql_name:expr) => {
+        add_error_context(
+            async {
+                let mut tx = $app
+                    .database
+                    .begin()
+                    .await
+                    .context("Failed to start the update transaction")?;
+                debug!("Migrating: {} -> {}", $old_version, $new_version);
+
+                sqlx::raw_sql(include_str!($sql_name))
+                    .execute(&mut *tx)
+                    .await
+                    .context("Failed to run the update sql script")?;
+
+                set_db_version(
+                    &mut tx,
+                    if $old_version == Self::Empty {
+                        // There is no previous version we would need to remove
+                        None
+                    } else {
+                        Some($old_version)
+                    },
+                    $new_version,
+                )
+                .await
+                .with_context(|| format!("Failed to set the new version ({})", $new_version))?;
+
+                tx.commit()
+                    .await
+                    .context("Failed to commit the update transaction")?;
+
+                // NOTE: This is needed, so that sqlite "sees" our changes to the table
+                // without having to reconnect. <2025-02-18>
+                query!("VACUUM")
+                    .execute(&$app.database)
+                    .await
+                    .context("Failed to vacuum database")?;
+
+                Ok(())
+            },
+            $new_version,
+        )
+        .await?;
+
+        Box::pin($new_version.update($app)).await.context(concat!(
+            "While updating to version: ",
+            stringify!($new_version)
+        ))
+    };
+}
+
 #[derive(Debug, Clone, Copy, PartialEq, PartialOrd)]
 pub enum DbVersion {
     /// The database is not yet initialized.
@@ -35,8 +88,11 @@ pub enum DbVersion {
 
     /// Introduced: 2025-02-18.
     Two,
+
+    /// Introduced: 2025-03-21.
+    Three,
 }
-const CURRENT_VERSION: DbVersion = DbVersion::Two;
+const CURRENT_VERSION: DbVersion = DbVersion::Three;
 
 async fn add_error_context(
     function: impl Future<Output = Result<()>>,
@@ -44,7 +100,7 @@ async fn add_error_context(
 ) -> Result<()> {
     function
         .await
-        .with_context(|| format!("Format failed to migrate database to version: {level}"))
+        .with_context(|| format!("Failed to migrate database to version: {level}"))
 }
 
 async fn set_db_version(
@@ -83,21 +139,26 @@ async fn set_db_version(
 impl DbVersion {
     fn as_sql_integer(self) -> i32 {
         match self {
-            DbVersion::Empty => unreachable!("A empty version does not have an associated integer"),
             DbVersion::Zero => 0,
             DbVersion::One => 1,
             DbVersion::Two => 2,
+            DbVersion::Three => 3,
+
+            DbVersion::Empty => unreachable!("A empty version does not have an associated integer"),
         }
     }
+
     fn from_db(number: i64, namespace: &str) -> Result<Self> {
         match (number, namespace) {
             (0, "yt") => Ok(DbVersion::Zero),
             (1, "yt") => Ok(DbVersion::One),
             (2, "yt") => Ok(DbVersion::Two),
+            (3, "yt") => Ok(DbVersion::Three),
 
             (0, other) => bail!("Db version is Zero, but got unknown namespace: '{other}'"),
             (1, other) => bail!("Db version is One, but got unknown namespace: '{other}'"),
             (2, other) => bail!("Db version is Two, but got unknown namespace: '{other}'"),
+            (3, other) => bail!("Db version is Three, but got unknown namespace: '{other}'"),
 
             (other, "yt") => bail!("Got unkown version for 'yt' namespace: {other}"),
             (num, nasp) => bail!("Got unkown version number ({num}) and namespace ('{nasp}')"),
@@ -111,126 +172,24 @@ impl DbVersion {
     #[allow(clippy::too_many_lines)]
     async fn update(self, app: &App) -> Result<()> {
         match self {
-            DbVersion::Empty => {
-                add_error_context(
-                    async {
-                        let mut tx = app
-                            .database
-                            .begin()
-                            .await
-                            .context("Failed to start transaction")?;
-                        debug!("Migrate: Empty -> Zero");
-
-                        sqlx::raw_sql(include_str!("./sql/00_empty_to_zero.sql"))
-                            .execute(&mut *tx)
-                            .await
-                            .context("Failed to execute sql update script")?;
-
-                        set_db_version(&mut tx, None, DbVersion::Zero)
-                            .await
-                            .context("Failed to set new version")?;
-
-                        tx.commit()
-                            .await
-                            .context("Failed to commit changes")?;
-
-                        // NOTE: This is needed, so that sqlite "sees" our changes to the table
-                        // without having to reconnect. <2025-02-18>
-                        query!("VACUUM")
-                            .execute(&app.database)
-                            .await
-                            .context("Failed to vacuum database")?;
-
-                        Ok(())
-                    },
-                    DbVersion::One,
-                )
-                .await?;
-                Box::pin(Self::Zero.update(app)).await
+            Self::Empty => {
+                make_upgrade! {app, Self::Empty, Self::Zero, "./sql/0_Empty_to_Zero.sql"}
             }
 
-            DbVersion::Zero => {
-                add_error_context(
-                    async {
-                        let mut tx = app
-                            .database
-                            .begin()
-                            .await
-                            .context("Failed to start transaction")?;
-                        debug!("Migrate: Zero -> One");
-
-                        sqlx::raw_sql(include_str!("./sql/01_zero_to_one.sql"))
-                            .execute(&mut *tx)
-                            .await
-                            .context("Failed to execute the update sql script")?;
-
-                        set_db_version(&mut tx, Some(DbVersion::Zero), DbVersion::One)
-                            .await
-                            .context("Failed to set the new version")?;
-
-                        tx.commit()
-                            .await
-                            .context("Failed to commit the update transaction")?;
-
-                        // NOTE: This is needed, so that sqlite "sees" our changes to the table
-                        // without having to reconnect. <2025-02-18>
-                        query!("VACUUM")
-                            .execute(&app.database)
-                            .await
-                            .context("Failed to vacuum database")?;
-
-                        Ok(())
-                    },
-                    DbVersion::Zero,
-                )
-                .await?;
-
-                Box::pin(Self::One.update(app)).await
+            Self::Zero => {
+                make_upgrade! {app, Self::Zero, Self::One, "./sql/1_Zero_to_One.sql"}
             }
 
-            DbVersion::One => {
-                add_error_context(
-                    async {
-                        let mut tx = app
-                            .database
-                            .begin()
-                            .await
-                            .context("Failed to start the update transaction")?;
-                        debug!("Migrate: One -> Two");
-
-                        sqlx::raw_sql(include_str!("./sql/02_one_to_two.sql"))
-                            .execute(&mut *tx)
-                            .await
-                            .context("Failed to run the update sql script")?;
-
-                        set_db_version(&mut tx, Some(DbVersion::One), DbVersion::Two)
-                            .await
-                            .context("Failed to set the new version")?;
-
-                        tx.commit()
-                            .await
-                            .context("Failed to commit the update transaction")?;
-
-                        // NOTE: This is needed, so that sqlite "sees" our changes to the table
-                        // without having to reconnect. <2025-02-18>
-                        query!("VACUUM")
-                            .execute(&app.database)
-                            .await
-                            .context("Failed to vacuum database")?;
-
-                        Ok(())
-                    },
-                    DbVersion::One,
-                )
-                .await?;
+            Self::One => {
+                make_upgrade! {app, Self::One, Self::Two, "./sql/2_One_to_Two.sql"}
+            }
 
-                Box::pin(Self::Two.update(app))
-                    .await
-                    .context("Failed to update to version: Three")
+            Self::Two => {
+                make_upgrade! {app, Self::Two, Self::Three, "./sql/3_Two_to_Three.sql"}
             }
 
             // This is the current_version
-            DbVersion::Two => {
+            Self::Three => {
                 assert_eq!(self, CURRENT_VERSION);
                 assert_eq!(self, get_version(app).await?);
                 Ok(())
diff --git a/yt/src/storage/migrate/sql/00_empty_to_zero.sql b/crates/yt/src/storage/migrate/sql/0_Empty_to_Zero.sql
index d703bfc..d703bfc 100644
--- a/yt/src/storage/migrate/sql/00_empty_to_zero.sql
+++ b/crates/yt/src/storage/migrate/sql/0_Empty_to_Zero.sql
diff --git a/yt/src/storage/migrate/sql/01_zero_to_one.sql b/crates/yt/src/storage/migrate/sql/1_Zero_to_One.sql
index da9315b..da9315b 100644
--- a/yt/src/storage/migrate/sql/01_zero_to_one.sql
+++ b/crates/yt/src/storage/migrate/sql/1_Zero_to_One.sql
diff --git a/yt/src/storage/migrate/sql/02_one_to_two.sql b/crates/yt/src/storage/migrate/sql/2_One_to_Two.sql
index 806de07..806de07 100644
--- a/yt/src/storage/migrate/sql/02_one_to_two.sql
+++ b/crates/yt/src/storage/migrate/sql/2_One_to_Two.sql
diff --git a/crates/yt/src/storage/migrate/sql/3_Two_to_Three.sql b/crates/yt/src/storage/migrate/sql/3_Two_to_Three.sql
new file mode 100644
index 0000000..b33f849
--- /dev/null
+++ b/crates/yt/src/storage/migrate/sql/3_Two_to_Three.sql
@@ -0,0 +1,85 @@
+-- yt - A fully featured command line YouTube client
+--
+-- Copyright (C) 2025 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+-- SPDX-License-Identifier: GPL-3.0-or-later
+--
+-- This file is part of Yt.
+--
+-- You should have received a copy of the License along with this program.
+-- If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
+
+
+-- 1. Create new table
+-- 2. Copy data
+-- 3. Drop old table
+-- 4. Rename new into old
+
+-- remove the original TRANSACTION
+COMMIT TRANSACTION;
+
+-- tweak config
+PRAGMA foreign_keys=OFF;
+
+-- start your own TRANSACTION
+BEGIN TRANSACTION;
+
+CREATE TABLE videos_new (
+    cache_path                  TEXT    UNIQUE                       CHECK (CASE
+                                                                              WHEN cache_path IS NOT NULL THEN status == 2
+                                                                              ELSE 1
+                                                                            END),
+    description                 TEXT,
+    duration                    REAL,
+    extractor_hash              TEXT    UNIQUE NOT NULL PRIMARY KEY,
+    last_status_change          INTEGER        NOT NULL,
+    parent_subscription_name    TEXT,
+    priority                    INTEGER        NOT NULL DEFAULT 0,
+    publish_date                INTEGER,
+    status                      INTEGER        NOT NULL DEFAULT 0    CHECK (status IN (0, 1, 2, 3, 4, 5) AND
+                                                                            CASE
+                                                                              WHEN status == 2 THEN cache_path IS NOT NULL
+                                                                              WHEN status != 2 THEN cache_path IS NULL
+                                                                              ELSE 1
+                                                                            END),
+    thumbnail_url               TEXT,
+    title                       TEXT           NOT NULL,
+    url                         TEXT    UNIQUE NOT NULL,
+    is_focused                  INTEGER UNIQUE          DEFAULT NULL CHECK (CASE
+                                                                              WHEN is_focused IS NOT NULL THEN is_focused == 1
+                                                                              ELSE 1
+                                                                            END),
+    watch_progress              INTEGER        NOT NULL DEFAULT 0    CHECK (watch_progress <= duration)
+) STRICT;
+
+INSERT INTO videos_new SELECT
+    videos.cache_path,
+    videos.description,
+    videos.duration,
+    videos.extractor_hash,
+    videos.last_status_change,
+    videos.parent_subscription_name,
+    videos.priority,
+    videos.publish_date,
+    videos.status,
+    videos.thumbnail_url,
+    videos.title,
+    videos.url,
+    dummy.is_focused,
+    videos.watch_progress
+FROM videos, (SELECT NULL AS is_focused) AS dummy;
+
+DROP TABLE videos;
+
+ALTER TABLE videos_new RENAME TO videos;
+
+-- check foreign key constraint still upholding.
+PRAGMA foreign_key_check;
+
+-- commit your own TRANSACTION
+COMMIT TRANSACTION;
+
+-- rollback all config you setup before.
+PRAGMA foreign_keys=ON;
+
+-- start a new TRANSACTION to let migrator commit it.
+BEGIN TRANSACTION;
diff --git a/yt/src/storage/mod.rs b/crates/yt/src/storage/mod.rs
index 8653eb3..d352b41 100644
--- a/yt/src/storage/mod.rs
+++ b/crates/yt/src/storage/mod.rs
@@ -9,6 +9,6 @@
 // You should have received a copy of the License along with this program.
 // If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
 
+pub mod migrate;
 pub mod subscriptions;
 pub mod video_database;
-pub mod migrate;
diff --git a/yt/src/storage/subscriptions.rs b/crates/yt/src/storage/subscriptions.rs
index 3673eee..6c0d08a 100644
--- a/yt/src/storage/subscriptions.rs
+++ b/crates/yt/src/storage/subscriptions.rs
@@ -15,10 +15,9 @@ use std::collections::HashMap;
 
 use anyhow::Result;
 use log::debug;
-use serde_json::{Value, json};
 use sqlx::query;
 use url::Url;
-use yt_dlp::wrapper::info_json::InfoType;
+use yt_dlp::YoutubeDLOptions;
 
 use crate::{app::App, unreachable::Unreachable};
 
@@ -39,21 +38,19 @@ impl Subscription {
 }
 
 /// Check whether an URL could be used as a subscription URL
-pub async fn check_url(url: &Url) -> Result<bool> {
-    let Value::Object(yt_opts) = json!( {
-        "playliststart": 1,
-        "playlistend": 10,
-        "noplaylist": false,
-        "extract_flat": "in_playlist",
-    }) else {
-        unreachable!("This is hardcoded");
-    };
-
-    let info = yt_dlp::extract_info(&yt_opts, url, false, false).await?;
+pub async fn check_url(url: Url) -> Result<bool> {
+    let yt_dlp = YoutubeDLOptions::new()
+        .set("playliststart", 1)
+        .set("playlistend", 10)
+        .set("noplaylist", false)
+        .set("extract_flat", "in_playlist")
+        .build()?;
+
+    let info = yt_dlp.extract_info(&url, false, false)?;
 
     debug!("{:#?}", info);
 
-    Ok(info._type == Some(InfoType::Playlist))
+    Ok(info.get("_type") == Some(&serde_json::Value::String("playlist".to_owned())))
 }
 
 #[derive(Default, Debug)]
diff --git a/yt/src/storage/video_database/downloader.rs b/crates/yt/src/storage/video_database/downloader.rs
index a95081e..a95081e 100644
--- a/yt/src/storage/video_database/downloader.rs
+++ b/crates/yt/src/storage/video_database/downloader.rs
diff --git a/yt/src/storage/video_database/extractor_hash.rs b/crates/yt/src/storage/video_database/extractor_hash.rs
index df545d7..df545d7 100644
--- a/yt/src/storage/video_database/extractor_hash.rs
+++ b/crates/yt/src/storage/video_database/extractor_hash.rs
diff --git a/yt/src/storage/video_database/get/mod.rs b/crates/yt/src/storage/video_database/get/mod.rs
index 6a4220e..0456cd3 100644
--- a/yt/src/storage/video_database/get/mod.rs
+++ b/crates/yt/src/storage/video_database/get/mod.rs
@@ -18,7 +18,7 @@ use anyhow::{Context, Result, bail};
 use blake3::Hash;
 use log::{debug, trace};
 use sqlx::query;
-use yt_dlp::wrapper::info_json::InfoJson;
+use yt_dlp::InfoJson;
 
 use crate::{
     app::App,
@@ -64,7 +64,11 @@ macro_rules! video_from_record {
                 let optional = if let Some(cache_path) = &$record.cache_path {
                     Some((
                         PathBuf::from(cache_path),
-                        if $record.is_focused == 1 { true } else { false },
+                        if $record.is_focused == Some(1) {
+                            true
+                        } else {
+                            false
+                        },
                     ))
                 } else {
                     None
diff --git a/yt/src/storage/video_database/get/playlist/iterator.rs b/crates/yt/src/storage/video_database/get/playlist/iterator.rs
index 4c45bf7..4c45bf7 100644
--- a/yt/src/storage/video_database/get/playlist/iterator.rs
+++ b/crates/yt/src/storage/video_database/get/playlist/iterator.rs
diff --git a/yt/src/storage/video_database/get/playlist/mod.rs b/crates/yt/src/storage/video_database/get/playlist/mod.rs
index f6aadbf..f6aadbf 100644
--- a/yt/src/storage/video_database/get/playlist/mod.rs
+++ b/crates/yt/src/storage/video_database/get/playlist/mod.rs
diff --git a/yt/src/storage/video_database/mod.rs b/crates/yt/src/storage/video_database/mod.rs
index 74d09f0..74d09f0 100644
--- a/yt/src/storage/video_database/mod.rs
+++ b/crates/yt/src/storage/video_database/mod.rs
diff --git a/yt/src/storage/video_database/notify.rs b/crates/yt/src/storage/video_database/notify.rs
index b55c00a..b55c00a 100644
--- a/yt/src/storage/video_database/notify.rs
+++ b/crates/yt/src/storage/video_database/notify.rs
diff --git a/yt/src/storage/video_database/set/mod.rs b/crates/yt/src/storage/video_database/set/mod.rs
index 4006fde..8c1be4a 100644
--- a/yt/src/storage/video_database/set/mod.rs
+++ b/crates/yt/src/storage/video_database/set/mod.rs
@@ -19,17 +19,17 @@ use log::{debug, info};
 use sqlx::query;
 use tokio::fs;
 
-use crate::{
-    app::App,
-    storage::video_database::{VideoStatusMarker, extractor_hash::ExtractorHash},
-    video_from_record,
-};
+use crate::{app::App, storage::video_database::extractor_hash::ExtractorHash, video_from_record};
 
 use super::{Priority, Video, VideoOptions, VideoStatus};
 
 mod playlist;
 pub use playlist::*;
 
+const fn is_focused_to_value(is_focused: bool) -> Option<i8> {
+    if is_focused { Some(1) } else { None }
+}
+
 /// Set a new status for a video.
 /// This will only update the status time stamp/priority when the status or the priority has changed .
 pub async fn video_status(
@@ -56,7 +56,7 @@ pub async fn video_status(
     };
 
     let old_marker = old.status.as_marker();
-    let cache_path = {
+    let (cache_path, is_focused) = {
         fn cache_path_to_string(path: &Path) -> Result<String> {
             Ok(path
                 .to_str()
@@ -69,13 +69,17 @@ pub async fn video_status(
                 .to_owned())
         }
 
-        match (old_marker, &new_status) {
-            (VideoStatusMarker::Cached, VideoStatus::Cached { cache_path, .. }) => {
-                Some(cache_path_to_string(cache_path)?)
-            }
-            (_, VideoStatus::Cached { cache_path, .. }) => Some(cache_path_to_string(cache_path)?),
-
-            (VideoStatusMarker::Cached | _, _) => None,
+        if let VideoStatus::Cached {
+            cache_path,
+            is_focused,
+        } = &new_status
+        {
+            (
+                Some(cache_path_to_string(cache_path)?),
+                is_focused_to_value(*is_focused),
+            )
+        } else {
+            (None, None)
         }
     };
 
@@ -98,13 +102,14 @@ pub async fn video_status(
         query!(
             r#"
         UPDATE videos
-        SET status = ?, last_status_change = ?, priority = ?, cache_path = ?
+        SET status = ?, last_status_change = ?, priority = ?, cache_path = ?, is_focused = ?
         WHERE extractor_hash = ?;
         "#,
             new_status,
             now,
             new_priority,
             cache_path,
+            is_focused,
             video_hash
         )
         .execute(&app.database)
@@ -125,12 +130,13 @@ pub async fn video_status(
         query!(
             r#"
         UPDATE videos
-        SET status = ?, last_status_change = ?, cache_path = ?
+        SET status = ?, last_status_change = ?, cache_path = ?, is_focused = ?
         WHERE extractor_hash = ?;
         "#,
             new_status,
             now,
             cache_path,
+            is_focused,
             video_hash
         )
         .execute(&app.database)
@@ -147,10 +153,9 @@ pub async fn video_status(
 /// # Panics
 /// Only if assertions fail.
 pub async fn video_watched(app: &App, video: &ExtractorHash) -> Result<()> {
-    let video_hash = video.hash().to_string();
-    let new_status = VideoStatusMarker::Watched.as_db_integer();
-
     let old = {
+        let video_hash = video.hash().to_string();
+
         let base = query!(
             r#"
     SELECT *
@@ -175,20 +180,7 @@ pub async fn video_watched(app: &App, video: &ExtractorHash) -> Result<()> {
         unreachable!("The video must be marked as Cached before it can be marked Watched");
     }
 
-    let now = Utc::now().timestamp();
-
-    query!(
-        r#"
-        UPDATE videos
-        SET status = ?, last_status_change = ?, cache_path = NULL
-        WHERE extractor_hash = ?;
-        "#,
-        new_status,
-        now,
-        video_hash
-    )
-    .execute(&app.database)
-    .await?;
+    video_status(app, video, VideoStatus::Watched, None).await?;
 
     Ok(())
 }
@@ -271,10 +263,10 @@ pub async fn add_video(app: &App, video: Video) -> Result<()> {
                     })?
                     .to_string(),
             ),
-            is_focused,
+            is_focused_to_value(is_focused),
         )
     } else {
-        (None, false)
+        (None, None)
     };
 
     let duration: Option<f64> = video.duration.as_secs_f64();
diff --git a/yt/src/storage/video_database/set/playlist.rs b/crates/yt/src/storage/video_database/set/playlist.rs
index 7e97239..547df21 100644
--- a/yt/src/storage/video_database/set/playlist.rs
+++ b/crates/yt/src/storage/video_database/set/playlist.rs
@@ -28,12 +28,9 @@ pub async fn focused(
     new_video_hash: &ExtractorHash,
     old_video_hash: Option<&ExtractorHash>,
 ) -> Result<()> {
-    if let Some(old) = old_video_hash {
-        debug!("Unfocusing video: '{old}'");
-        unfocused(app, old).await?;
-    }
-    debug!("Focusing video: '{new_video_hash}'");
+    unfocused(app, old_video_hash).await?;
 
+    debug!("Focusing video: '{new_video_hash}'");
     let new_hash = new_video_hash.hash().to_string();
     query!(
         r#"
@@ -57,15 +54,38 @@ pub async fn focused(
 }
 
 /// Set a video to be no longer focused.
+/// This will use the supplied `video_hash` if it is [`Some`], otherwise it will simply un-focus
+/// the currently focused video.
 ///
 /// # Panics
 /// Only if internal assertions fail.
-pub async fn unfocused(app: &App, video_hash: &ExtractorHash) -> Result<()> {
-    let hash = video_hash.hash().to_string();
+pub async fn unfocused(app: &App, video_hash: Option<&ExtractorHash>) -> Result<()> {
+    let hash = if let Some(hash) = video_hash {
+        hash.hash().to_string()
+    } else {
+        let output = query!(
+            r#"
+                SELECT extractor_hash
+                FROM videos
+                WHERE is_focused = 1;
+            "#,
+        )
+        .fetch_optional(&app.database)
+        .await?;
+
+        if let Some(output) = output {
+            output.extractor_hash
+        } else {
+            // There is no focused video right now, so there is nothing to un-focus.
+            return Ok(());
+        }
+    };
+    debug!("Unfocusing video: '{hash}'");
+
     query!(
         r#"
             UPDATE videos
-            SET is_focused = 0
+            SET is_focused = NULL
             WHERE extractor_hash = ?;
         "#,
         hash
diff --git a/yt/src/subscribe/mod.rs b/crates/yt/src/subscribe/mod.rs
index 455ccb1..7ac0be4 100644
--- a/yt/src/subscribe/mod.rs
+++ b/crates/yt/src/subscribe/mod.rs
@@ -14,10 +14,9 @@ use std::str::FromStr;
 use anyhow::{Context, Result, bail};
 use futures::FutureExt;
 use log::warn;
-use serde_json::{Value, json};
 use tokio::io::{AsyncBufRead, AsyncBufReadExt};
 use url::Url;
-use yt_dlp::wrapper::info_json::InfoType;
+use yt_dlp::{YoutubeDLOptions, json_get};
 
 use crate::{
     app::App,
@@ -142,26 +141,24 @@ pub async fn subscribe(app: &App, name: Option<String>, url: Url) -> Result<()>
 }
 
 async fn actual_subscribe(app: &App, name: Option<String>, url: Url) -> Result<()> {
-    if !check_url(&url).await? {
+    if !check_url(url.clone()).await? {
         bail!("The url ('{}') does not represent a playlist!", &url)
-    };
+    }
 
     let name = if let Some(name) = name {
         name
     } else {
-        let Value::Object(yt_opts) = json!( {
-            "playliststart": 1,
-            "playlistend": 10,
-            "noplaylist": false,
-            "extract_flat": "in_playlist",
-        }) else {
-            unreachable!("This is hardcoded")
-        };
-
-        let info = yt_dlp::extract_info(&yt_opts, &url, false, false).await?;
-
-        if info._type == Some(InfoType::Playlist) {
-            info.title.expect("This should be some for a playlist")
+        let yt_dlp = YoutubeDLOptions::new()
+            .set("playliststart", 1)
+            .set("playlistend", 10)
+            .set("noplaylist", false)
+            .set("extract_flat", "in_playlist")
+            .build()?;
+
+        let info = yt_dlp.extract_info(&url, false, false)?;
+
+        if info.get("_type") == Some(&serde_json::Value::String("playlist".to_owned())) {
+            json_get!(info, "title", as_str).to_owned()
         } else {
             bail!("The url ('{}') does not represent a playlist!", &url)
         }
diff --git a/yt/src/unreachable.rs b/crates/yt/src/unreachable.rs
index 436fbb6..436fbb6 100644
--- a/yt/src/unreachable.rs
+++ b/crates/yt/src/unreachable.rs
diff --git a/yt/src/update/mod.rs b/crates/yt/src/update/mod.rs
index 7efe0da..f0b1e2c 100644
--- a/yt/src/update/mod.rs
+++ b/crates/yt/src/update/mod.rs
@@ -15,7 +15,7 @@ use anyhow::{Context, Ok, Result};
 use chrono::{DateTime, Utc};
 use log::{info, warn};
 use url::Url;
-use yt_dlp::{unsmuggle_url, wrapper::info_json::InfoJson};
+use yt_dlp::{InfoJson, json_cast, json_get};
 
 use crate::{
     app::App,
@@ -72,19 +72,7 @@ pub async fn update(
 }
 
 #[allow(clippy::too_many_lines)]
-pub fn video_entry_to_video(entry: InfoJson, sub: Option<&Subscription>) -> Result<Video> {
-    macro_rules! unwrap_option {
-        ($option:expr) => {
-            match $option {
-                Some(x) => x,
-                None => anyhow::bail!(concat!(
-                    "Expected a value, but '",
-                    stringify!($option),
-                    "' is None!"
-                )),
-            }
-        };
-    }
+pub fn video_entry_to_video(entry: &InfoJson, sub: Option<&Subscription>) -> Result<Video> {
     fn fmt_context(date: &str, extended: Option<&str>) -> String {
         let f = format!(
             "Failed to parse the `upload_date` of the entry ('{date}'). \
@@ -97,7 +85,9 @@ pub fn video_entry_to_video(entry: InfoJson, sub: Option<&Subscription>) -> Resu
         }
     }
 
-    let publish_date = if let Some(date) = &entry.upload_date {
+    let publish_date = if let Some(date) = &entry.get("upload_date") {
+        let date = json_cast!(date, as_str);
+
         let year: u32 = date
             .chars()
             .take(4)
@@ -113,7 +103,7 @@ pub fn video_entry_to_video(entry: InfoJson, sub: Option<&Subscription>) -> Resu
             .with_context(|| fmt_context(date, None))?;
         let day: u32 = date
             .chars()
-            .skip(6)
+            .skip(4 + 2)
             .take(2)
             .collect::<String>()
             .parse()
@@ -128,42 +118,59 @@ pub fn video_entry_to_video(entry: InfoJson, sub: Option<&Subscription>) -> Resu
     } else {
         warn!(
             "The video '{}' lacks it's upload date!",
-            unwrap_option!(&entry.title)
+            json_get!(entry, "title", as_str)
         );
         None
     };
 
-    let thumbnail_url = match (&entry.thumbnails, &entry.thumbnail) {
+    let thumbnail_url = match (&entry.get("thumbnails"), &entry.get("thumbnail")) {
         (None, None) => None,
-        (None, Some(thumbnail)) => Some(thumbnail.to_owned()),
+        (None, Some(thumbnail)) => Some(Url::from_str(json_cast!(thumbnail, as_str))?),
 
         // TODO: The algorithm is not exactly the best <2024-05-28>
-        (Some(thumbnails), None) => thumbnails.first().map(|thumbnail| thumbnail.url.clone()),
-        (Some(_), Some(thumnail)) => Some(thumnail.to_owned()),
+        (Some(thumbnails), None) => {
+            if let Some(thumbnail) = json_cast!(thumbnails, as_array).first() {
+                Some(Url::from_str(json_get!(
+                    json_cast!(thumbnail, as_object),
+                    "url",
+                    as_str
+                ))?)
+            } else {
+                None
+            }
+        }
+        (Some(_), Some(thumnail)) => Some(Url::from_str(json_cast!(thumnail, as_str))?),
     };
 
     let url = {
-        let smug_url: Url = unwrap_option!(entry.webpage_url.clone());
-        unsmuggle_url(&smug_url)?
+        let smug_url: Url = json_get!(entry, "webpage_url", as_str).parse()?;
+        // NOTE(review): `unsmuggle_url` was dropped with the new yt_dlp wrapper — confirm smuggled URLs cannot reach this point.
+        smug_url
     };
 
-    let extractor_hash = blake3::hash(unwrap_option!(entry.id).as_bytes());
+    let extractor_hash = blake3::hash(json_get!(entry, "id", as_str).as_bytes());
 
     let subscription_name = if let Some(sub) = sub {
         Some(sub.name.clone())
-    } else if let Some(uploader) = entry.uploader {
-        if entry.webpage_url_domain == Some("youtube.com".to_owned()) {
+    } else if let Some(uploader) = entry.get("uploader") {
+        if entry.get("webpage_url_domain")
+            == Some(&serde_json::Value::String("youtube.com".to_owned()))
+        {
             Some(format!("{uploader} - Videos"))
         } else {
-            Some(uploader.clone())
+            Some(json_cast!(uploader, as_str).to_owned())
         }
     } else {
         None
     };
 
     let video = Video {
-        description: entry.description.clone(),
-        duration: MaybeDuration::from_maybe_secs_f64(entry.duration),
+        description: entry
+            .get("description")
+            .map(|val| json_cast!(val, as_str).to_owned()),
+        duration: MaybeDuration::from_maybe_secs_f64(
+            entry.get("duration").map(|val| json_cast!(val, as_f64)),
+        ),
         extractor_hash: ExtractorHash::from_hash(extractor_hash),
         last_status_change: TimeStamp::from_now(),
         parent_subscription_name: subscription_name,
@@ -171,7 +178,7 @@ pub fn video_entry_to_video(entry: InfoJson, sub: Option<&Subscription>) -> Resu
         publish_date: publish_date.map(TimeStamp::from_secs),
         status: VideoStatus::Pick,
         thumbnail_url,
-        title: unwrap_option!(entry.title.clone()),
+        title: json_get!(entry, "title", as_str).to_owned(),
         url,
         watch_progress: Duration::default(),
     };
@@ -180,7 +187,7 @@ pub fn video_entry_to_video(entry: InfoJson, sub: Option<&Subscription>) -> Resu
 
 async fn process_subscription(app: &App, sub: &Subscription, entry: InfoJson) -> Result<()> {
     let video =
-        video_entry_to_video(entry, Some(sub)).context("Failed to parse search entry as Video")?;
+        video_entry_to_video(&entry, Some(sub)).context("Failed to parse search entry as Video")?;
 
     add_video(app, video.clone())
         .await
diff --git a/crates/yt/src/update/updater.rs b/crates/yt/src/update/updater.rs
new file mode 100644
index 0000000..8da654b
--- /dev/null
+++ b/crates/yt/src/update/updater.rs
@@ -0,0 +1,167 @@
+// yt - A fully featured command line YouTube client
+//
+// Copyright (C) 2025 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+// SPDX-License-Identifier: GPL-3.0-or-later
+//
+// This file is part of Yt.
+//
+// You should have received a copy of the License along with this program.
+// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
+
+use std::io::{Write, stderr};
+
+use anyhow::{Context, Result};
+use blake3::Hash;
+use futures::{
+    StreamExt, TryStreamExt,
+    stream::{self},
+};
+use log::{Level, debug, error, log_enabled};
+use serde_json::json;
+use yt_dlp::{InfoJson, YoutubeDLOptions, json_cast, json_get};
+
+use crate::{
+    ansi_escape_codes::{clear_whole_line, move_to_col},
+    app::App,
+    storage::subscriptions::Subscription,
+};
+
+use super::process_subscription;
+
+pub(super) struct Updater<'a> {
+    max_backlog: usize,
+    hashes: &'a [Hash],
+}
+
+impl<'a> Updater<'a> {
+    pub(super) fn new(max_backlog: usize, hashes: &'a [Hash]) -> Self {
+        Self {
+            max_backlog,
+            hashes,
+        }
+    }
+
+    pub(super) async fn update(
+        &mut self,
+        app: &App,
+        subscriptions: &[&Subscription],
+    ) -> Result<()> {
+        let mut stream = stream::iter(subscriptions)
+            .map(|sub| self.get_new_entries(sub))
+            .buffer_unordered(100);
+
+        while let Some(output) = stream.next().await {
+            let mut entries = output?;
+
+            if entries.is_empty() {
+                continue;
+            }
+
+            let (sub, entry) = entries.remove(0);
+            process_subscription(app, sub, entry).await?;
+
+            let entry_stream: Result<()> = stream::iter(entries)
+                .map(|(sub, entry)| process_subscription(app, sub, entry))
+                .buffer_unordered(100)
+                .try_collect()
+                .await;
+            entry_stream?;
+        }
+
+        Ok(())
+    }
+
+    async fn get_new_entries(
+        &self,
+        sub: &'a Subscription,
+    ) -> Result<Vec<(&'a Subscription, InfoJson)>> {
+        let yt_dlp = YoutubeDLOptions::new()
+            .set("playliststart", 1)
+            .set("playlistend", self.max_backlog)
+            .set("noplaylist", false)
+            .set(
+                "extractor_args",
+                json! {{"youtubetab": {"approximate_date": [""]}}},
+            )
+            // TODO: This also removes unlisted and other stuff. Find a good way to remove the
+            // members-only videos from the feed. <2025-04-17>
+            .set("match-filter", "availability=public")
+            .build()?;
+
+        if !log_enabled!(Level::Debug) {
+            clear_whole_line();
+            move_to_col(1);
+            eprint!("Checking playlist {}...", sub.name);
+            move_to_col(1);
+            stderr().flush()?;
+        }
+
+        let info = yt_dlp
+            .extract_info(&sub.url, false, false)
+            .with_context(|| format!("Failed to get playlist '{}'.", sub.name))?;
+
+        let empty = vec![];
+        let entries = info
+            .get("entries")
+            .map_or(&empty, |val| json_cast!(val, as_array));
+
+        let valid_entries: Vec<(&Subscription, InfoJson)> = entries
+            .iter()
+            .take(self.max_backlog)
+            .filter_map(|entry| -> Option<(&Subscription, InfoJson)> {
+                let id = json_get!(entry, "id", as_str);
+                let extractor_hash = blake3::hash(id.as_bytes());
+                if self.hashes.contains(&extractor_hash) {
+                    debug!("Skipping entry, as it is already present: '{extractor_hash}'",);
+                    None
+                } else {
+                    Some((sub, json_cast!(entry, as_object).to_owned()))
+                }
+            })
+            .collect();
+
+        let processed_entries: Vec<(&Subscription, InfoJson)> = stream::iter(valid_entries)
+            .map(
+                async |(sub, entry)| match yt_dlp.process_ie_result(entry, false) {
+                    Ok(output) => Ok((sub, output)),
+                    Err(err) => Err(err),
+                },
+            )
+            .buffer_unordered(100)
+            .collect::<Vec<_>>()
+            .await
+            .into_iter()
+            // Don't fail the whole update, if one of the entries fails to fetch.
+            .filter_map(|base| match base {
+                Ok(ok) => Some(ok),
+                Err(err) => {
+                    // TODO(@bpeetz): Add this <2025-06-13>
+                    // if let YtDlpError::PythonError { error, kind } = &err {
+                    //     if kind.as_str() == "<class 'yt_dlp.utils.DownloadError'>"
+                    //         && error.to_string().as_str().contains(
+                    //             "Join this channel to get access to members-only content ",
+                    //         )
+                    //     {
+                    //         // Hide this error
+                    //     } else {
+                    //         let error_string = error.to_string();
+                    //         let error = error_string
+                    //             .strip_prefix("DownloadError: \u{1b}[0;31mERROR:\u{1b}[0m ")
+                    //             .expect("This prefix should exists");
+                    //         error!("{error}");
+                    //     }
+                    //     return None;
+                    // }
+
+                    // TODO(@bpeetz): Ideally, we _would_ actually exit on unexpected errors, but
+                    // this is fine for now.  <2025-06-13>
+                    // Some(Err(err).context("Failed to process new entries."))
+                    error!("While processing entry: {err}");
+                    None
+                }
+            })
+            .collect();
+
+        Ok(processed_entries)
+    }
+}
diff --git a/yt/src/version/mod.rs b/crates/yt/src/version/mod.rs
index 05d85e0..05d85e0 100644
--- a/yt/src/version/mod.rs
+++ b/crates/yt/src/version/mod.rs
diff --git a/yt/src/videos/display/format_video.rs b/crates/yt/src/videos/display/format_video.rs
index b97acb1..b97acb1 100644
--- a/yt/src/videos/display/format_video.rs
+++ b/crates/yt/src/videos/display/format_video.rs
diff --git a/yt/src/videos/display/mod.rs b/crates/yt/src/videos/display/mod.rs
index 1188569..1188569 100644
--- a/yt/src/videos/display/mod.rs
+++ b/crates/yt/src/videos/display/mod.rs
diff --git a/yt/src/videos/mod.rs b/crates/yt/src/videos/mod.rs
index e821772..e821772 100644
--- a/yt/src/videos/mod.rs
+++ b/crates/yt/src/videos/mod.rs
diff --git a/yt/src/watch/mod.rs b/crates/yt/src/watch/mod.rs
index 6827b2c..c32a76f 100644
--- a/yt/src/watch/mod.rs
+++ b/crates/yt/src/watch/mod.rs
@@ -58,9 +58,12 @@ fn init_mpv(app: &App) -> Result<(Mpv, EventContext)> {
     let config_path = &app.config.paths.mpv_config_path;
     if config_path.try_exists()? {
         info!("Found mpv.conf at '{}'!", config_path.display());
-        mpv.command("load-config-file", &[config_path
-            .to_str()
-            .context("Failed to parse the config path is utf8-stringt")?])?;
+        mpv.command(
+            "load-config-file",
+            &[config_path
+                .to_str()
+                .context("Failed to parse the config path is utf8-stringt")?],
+        )?;
     } else {
         warn!(
             "Did not find a mpv.conf file at '{}'",
@@ -71,9 +74,12 @@ fn init_mpv(app: &App) -> Result<(Mpv, EventContext)> {
     let input_path = &app.config.paths.mpv_input_path;
     if input_path.try_exists()? {
         info!("Found mpv.input.conf at '{}'!", input_path.display());
-        mpv.command("load-input-conf", &[input_path
-            .to_str()
-            .context("Failed to parse the input path as utf8 string")?])?;
+        mpv.command(
+            "load-input-conf",
+            &[input_path
+                .to_str()
+                .context("Failed to parse the input path as utf8 string")?],
+        )?;
     } else {
         warn!(
             "Did not find a mpv.input.conf file at '{}'",
diff --git a/yt/src/watch/playlist.rs b/crates/yt/src/watch/playlist.rs
index 6ac8b12..ff383d0 100644
--- a/yt/src/watch/playlist.rs
+++ b/crates/yt/src/watch/playlist.rs
@@ -11,6 +11,7 @@
 use std::path::Path;
 
 use crate::{
+    ansi_escape_codes::{cursor_up, erase_in_display_from_cursor},
     app::App,
     storage::video_database::{Video, VideoStatus, get, notify::wait_for_db_write},
 };
@@ -31,17 +32,6 @@ fn cache_values(video: &Video) -> (&Path, bool) {
     }
 }
 
-// ANSI ESCAPE CODES Wrappers {{{
-// see: https://en.wikipedia.org/wiki/ANSI_escape_code#Control_Sequence_Introducer_commands
-const CSI: &str = "\x1b[";
-fn erase_in_display_from_cursor() {
-    print!("{CSI}0J");
-}
-fn cursor_up(number: usize) {
-    print!("{CSI}{number}A");
-}
-// }}}
-
 /// # Panics
 /// Only if internal assertions fail.
 pub async fn playlist(app: &App, watch: bool) -> Result<()> {
diff --git a/yt/src/watch/playlist_handler/client_messages/mod.rs b/crates/yt/src/watch/playlist_handler/client_messages/mod.rs
index 6f7a59e..6f7a59e 100644
--- a/yt/src/watch/playlist_handler/client_messages/mod.rs
+++ b/crates/yt/src/watch/playlist_handler/client_messages/mod.rs
diff --git a/yt/src/watch/playlist_handler/mod.rs b/crates/yt/src/watch/playlist_handler/mod.rs
index 2672ff5..29b8f39 100644
--- a/yt/src/watch/playlist_handler/mod.rs
+++ b/crates/yt/src/watch/playlist_handler/mod.rs
@@ -41,10 +41,10 @@ pub enum Status {
 }
 
 fn mpv_message(mpv: &Mpv, message: &str, time: Duration) -> Result<()> {
-    mpv.command("show-text", &[
-        message,
-        time.as_millis().to_string().as_str(),
-    ])?;
+    mpv.command(
+        "show-text",
+        &[message, time.as_millis().to_string().as_str()],
+    )?;
     Ok(())
 }
 
@@ -139,15 +139,18 @@ pub(super) async fn reload_mpv_playlist(
 
     debug!("Will add {} videos to playlist.", playlist.len());
     playlist.into_iter().try_for_each(|cache_path| {
-        mpv.command("loadfile", &[
-            cache_path.to_str().with_context(|| {
-                format!(
-                    "Failed to parse the video cache path ('{}') as valid utf8",
-                    cache_path.display()
-                )
-            })?,
-            "append-play",
-        ])?;
+        mpv.command(
+            "loadfile",
+            &[
+                cache_path.to_str().with_context(|| {
+                    format!(
+                        "Failed to parse the video cache path ('{}') as valid utf8",
+                        cache_path.display()
+                    )
+                })?,
+                "append-play",
+            ],
+        )?;
 
         Ok::<(), anyhow::Error>(())
     })?;
diff --git a/crates/yt_dlp/.cargo/config.toml b/crates/yt_dlp/.cargo/config.toml
deleted file mode 100644
index d84f14d..0000000
--- a/crates/yt_dlp/.cargo/config.toml
+++ /dev/null
@@ -1,12 +0,0 @@
-# yt - A fully featured command line YouTube client
-#
-# Copyright (C) 2024 Benedikt Peetz <benedikt.peetz@b-peetz.de>
-# SPDX-License-Identifier: GPL-3.0-or-later
-#
-# This file is part of Yt.
-#
-# You should have received a copy of the License along with this program.
-# If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
-
-[env]
-PYO3_PYTHON = "/nix/store/7xzk119acyws2c4ysygdv66l0grxkr39-python3-3.11.9-env/bin/python3"
diff --git a/crates/yt_dlp/Cargo.toml b/crates/yt_dlp/Cargo.toml
index a948a34..ddd5f9b 100644
--- a/crates/yt_dlp/Cargo.toml
+++ b/crates/yt_dlp/Cargo.toml
@@ -10,7 +10,7 @@
 
 [package]
 name = "yt_dlp"
-description = "A wrapper around the python yt_dlp library"
+description = "A rust fii wrapper library for the python yt_dlp library"
 keywords = []
 categories = []
 version.workspace = true
@@ -19,19 +19,16 @@ authors.workspace = true
 license.workspace = true
 repository.workspace = true
 rust-version.workspace = true
-publish = false
+publish = true
 
 [dependencies]
-pyo3 = { version = "0.23.4", features = ["auto-initialize"] }
-bytes.workspace = true
+indexmap = { version = "2.9.0", default-features = false }
 log.workspace = true
-serde.workspace = true
+rustpython = { git = "https://github.com/RustPython/RustPython.git", features = ["threading", "stdlib", "stdio", "importlib", "ssl"], default-features = false }
 serde_json.workspace = true
+thiserror = "2.0.12"
 url.workspace = true
 
-[dev-dependencies]
-tokio.workspace = true
-
 [lints]
 workspace = true
 
diff --git a/crates/yt_dlp/src/duration.rs b/crates/yt_dlp/src/duration.rs
deleted file mode 100644
index 19181a5..0000000
--- a/crates/yt_dlp/src/duration.rs
+++ /dev/null
@@ -1,78 +0,0 @@
-// yt - A fully featured command line YouTube client
-//
-// Copyright (C) 2024 Benedikt Peetz <benedikt.peetz@b-peetz.de>
-// SPDX-License-Identifier: GPL-3.0-or-later
-//
-// This file is part of Yt.
-//
-// You should have received a copy of the License along with this program.
-// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
-
-// TODO: This file should be de-duplicated with the same file in the 'yt' crate <2024-06-25>
-
-#[derive(Debug, Clone, Copy)]
-pub struct Duration {
-    time: u32,
-}
-
-impl From<&str> for Duration {
-    fn from(v: &str) -> Self {
-        let buf: Vec<_> = v.split(':').take(2).collect();
-        Self {
-            time: (buf[0].parse::<u32>().expect("Should be a number") * 60)
-                + buf[1].parse::<u32>().expect("Should be a number"),
-        }
-    }
-}
-
-impl From<Option<f64>> for Duration {
-    fn from(value: Option<f64>) -> Self {
-        Self {
-            #[allow(
-                clippy::cast_possible_truncation,
-                clippy::cast_precision_loss,
-                clippy::cast_sign_loss
-            )]
-            time: value.unwrap_or(0.0).ceil() as u32,
-        }
-    }
-}
-
-impl std::fmt::Display for Duration {
-    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> Result<(), std::fmt::Error> {
-        const SECOND: u32 = 1;
-        const MINUTE: u32 = 60 * SECOND;
-        const HOUR: u32 = 60 * MINUTE;
-
-        let base_hour = self.time - (self.time % HOUR);
-        let base_min = (self.time % HOUR) - ((self.time % HOUR) % MINUTE);
-        let base_sec = (self.time % HOUR) % MINUTE;
-
-        let h = base_hour / HOUR;
-        let m = base_min / MINUTE;
-        let s = base_sec / SECOND;
-
-        if self.time == 0 {
-            write!(f, "0s")
-        } else if h > 0 {
-            write!(f, "{h}h {m}m")
-        } else {
-            write!(f, "{m}m {s}s")
-        }
-    }
-}
-#[cfg(test)]
-mod test {
-    use super::Duration;
-
-    #[test]
-    fn test_display_duration_1h() {
-        let dur = Duration { time: 60 * 60 };
-        assert_eq!("1h 0m".to_owned(), dur.to_string());
-    }
-    #[test]
-    fn test_display_duration_30min() {
-        let dur = Duration { time: 60 * 30 };
-        assert_eq!("30m 0s".to_owned(), dur.to_string());
-    }
-}
diff --git a/crates/yt_dlp/src/error.rs b/crates/yt_dlp/src/error.rs
deleted file mode 100644
index 3881f0b..0000000
--- a/crates/yt_dlp/src/error.rs
+++ /dev/null
@@ -1,68 +0,0 @@
-// yt - A fully featured command line YouTube client
-//
-// Copyright (C) 2025 Benedikt Peetz <benedikt.peetz@b-peetz.de>
-// SPDX-License-Identifier: GPL-3.0-or-later
-//
-// This file is part of Yt.
-//
-// You should have received a copy of the License along with this program.
-// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
-
-use std::{fmt::Display, io};
-
-use pyo3::Python;
-
-#[derive(Debug)]
-#[allow(clippy::module_name_repetitions)]
-pub enum YtDlpError {
-    ResponseParseError {
-        error: serde_json::error::Error,
-    },
-    PythonError {
-        error: Box<pyo3::PyErr>,
-        kind: String,
-    },
-    IoError {
-        error: io::Error,
-    },
-}
-
-impl std::error::Error for YtDlpError {}
-
-impl Display for YtDlpError {
-    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-        match self {
-            YtDlpError::ResponseParseError { error } => write!(
-                f,
-                include_str!("./python_json_decode_failed.error_msg"),
-                error
-            ),
-            YtDlpError::PythonError { error, kind: _ } => write!(f, "Python error: {error}"),
-            YtDlpError::IoError { error } => write!(f, "Io error: {error}"),
-        }
-    }
-}
-
-impl From<serde_json::error::Error> for YtDlpError {
-    fn from(value: serde_json::error::Error) -> Self {
-        Self::ResponseParseError { error: value }
-    }
-}
-
-impl From<pyo3::PyErr> for YtDlpError {
-    fn from(value: pyo3::PyErr) -> Self {
-        Python::with_gil(|py| {
-            let kind = value.get_type(py).to_string();
-            Self::PythonError {
-                error: Box::new(value),
-                kind,
-            }
-        })
-    }
-}
-
-impl From<io::Error> for YtDlpError {
-    fn from(value: io::Error) -> Self {
-        Self::IoError { error: value }
-    }
-}
diff --git a/crates/yt_dlp/src/lib.rs b/crates/yt_dlp/src/lib.rs
index 40610c2..34b8a5d 100644
--- a/crates/yt_dlp/src/lib.rs
+++ b/crates/yt_dlp/src/lib.rs
@@ -1,551 +1,541 @@
-// yt - A fully featured command line YouTube client
-//
-// Copyright (C) 2024 Benedikt Peetz <benedikt.peetz@b-peetz.de>
-// SPDX-License-Identifier: GPL-3.0-or-later
-//
-// This file is part of Yt.
-//
-// You should have received a copy of the License along with this program.
-// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
-
-// The pyo3 `pyfunction` proc-macros call unsafe functions internally, which trigger this lint.
-#![allow(unsafe_op_in_unsafe_fn)]
-#![allow(clippy::missing_errors_doc)]
-
-use std::io::stderr;
-use std::{env, process};
-use std::{fs::File, io::Write};
-
-use std::{path::PathBuf, sync::Once};
-
-use crate::{duration::Duration, logging::setup_logging, wrapper::info_json::InfoJson};
-
-use bytes::Bytes;
-use error::YtDlpError;
-use log::{Level, debug, info, log_enabled};
-use pyo3::types::{PyString, PyTuple, PyTupleMethods};
-use pyo3::{
-    Bound, PyAny, PyResult, Python, pyfunction,
-    types::{PyAnyMethods, PyDict, PyDictMethods, PyList, PyListMethods, PyModule},
-    wrap_pyfunction,
+//! The `yt_dlp` interface is completely contained in the [`YoutubeDL`] structure.
+
+use std::io::Write;
+use std::mem;
+use std::{env, fs::File, path::PathBuf};
+
+use indexmap::IndexMap;
+use log::{Level, debug, error, info, log_enabled};
+use logging::setup_logging;
+use rustpython::vm::builtins::PyList;
+use rustpython::{
+    InterpreterConfig,
+    vm::{
+        self, Interpreter, PyObjectRef, PyRef, VirtualMachine,
+        builtins::{PyBaseException, PyDict, PyStr},
+        function::{FuncArgs, KwArgs, PosArgs},
+    },
 };
-use serde::Serialize;
-use serde_json::{Map, Value};
 use url::Url;
 
-pub mod duration;
-pub mod error;
-pub mod logging;
-pub mod wrapper;
+mod logging;
+pub mod progress_hook;
 
-#[cfg(test)]
-mod tests;
-
-/// Synchronisation helper, to ensure that we don't setup the logger multiple times
-static SYNC_OBJ: Once = Once::new();
+#[macro_export]
+macro_rules! json_get {
+    ($value:expr, $name:literal, $into:ident) => {
+        $crate::json_cast!($value.get($name).expect("Should exist"), $into)
+    };
+}
 
-/// Add a logger to the yt-dlp options.
-/// If you have an logger set (i.e. for rust), than this will log to rust
-///
-/// # Panics
-/// This should never panic.
-pub fn add_logger_and_sig_handler<'a>(
-    opts: Bound<'a, PyDict>,
-    py: Python<'_>,
-) -> PyResult<Bound<'a, PyDict>> {
-    /// Is the specified record to be logged? Returns false for no,
-    /// true for yes. Filters can either modify log records in-place or
-    /// return a completely different record instance which will replace
-    /// the original log record in any future processing of the event.
-    #[pyfunction]
-    fn filter_error_log(_py: Python<'_>, record: &Bound<'_, PyAny>) -> bool {
-        // Filter out all error logs (they are propagated as rust errors)
-        let levelname: String = record
-            .getattr("levelname")
-            .expect("This should exist")
-            .extract()
-            .expect("This should be a String");
-
-        let return_value = levelname.as_str() != "ERROR";
-
-        if log_enabled!(Level::Debug) && !return_value {
-            let message: String = record
-                .call_method0("getMessage")
-                .expect("This method exists")
-                .extract()
-                .expect("The message is a string");
-
-            debug!("Swollowed error message: '{message}'");
-        }
-        return_value
-    }
+#[macro_export]
+macro_rules! json_cast {
+    ($value:expr, $into:ident) => {
+        $value.$into().expect(concat!(
+            "Should be able to cast value into ",
+            stringify!($into)
+        ))
+    };
+}
 
-    setup_logging(py, "yt_dlp")?;
-
-    let logging = PyModule::import(py, "logging")?;
-    let ytdl_logger = logging.call_method1("getLogger", ("yt_dlp",))?;
-
-    // Ensure that all events are logged by setting the log level to NOTSET (we filter on rust's side)
-    // Also use this static, to ensure that we don't configure the logger every time
-    SYNC_OBJ.call_once(|| {
-        // Disable the SIGINT (Ctrl+C) handler, python installs.
-        // This allows the user to actually stop the application with Ctrl+C.
-        // This is here because it can only be run in the main thread and this was here already.
-        py.run(
-            c"\
-import signal
-signal.signal(signal.SIGINT, signal.SIG_DFL)",
-            None,
-            None,
-        )
-        .expect("This code should always work");
-
-        let config_opts = PyDict::new(py);
-        config_opts
-            .set_item("level", 0)
-            .expect("Setting this item should always work");
-
-        logging
-            .call_method("basicConfig", (), Some(&config_opts))
-            .expect("This method exists");
-    });
-
-    ytdl_logger.call_method1(
-        "addFilter",
-        (wrap_pyfunction!(filter_error_log, py).expect("This function can be wrapped"),),
-    )?;
-
-    // This was taken from `ytcc`, I don't think it is still applicable
-    // ytdl_logger.setattr("propagate", false)?;
-    // let logging_null_handler = logging.call_method0("NullHandler")?;
-    // ytdl_logger.setattr("addHandler", logging_null_handler)?;
-
-    opts.set_item("logger", ytdl_logger).expect("Should work");
-
-    Ok(opts)
+/// The core of the `yt_dlp` interface.
+pub struct YoutubeDL {
+    interpreter: Interpreter,
+    youtube_dl_class: PyObjectRef,
+    yt_dlp_module: PyObjectRef,
+    options: serde_json::Map<String, serde_json::Value>,
 }
 
-#[pyfunction]
-#[allow(clippy::too_many_lines)]
-#[allow(clippy::missing_panics_doc)]
-#[allow(clippy::items_after_statements)]
-#[allow(
-    clippy::cast_possible_truncation,
-    clippy::cast_sign_loss,
-    clippy::cast_precision_loss
-)]
-pub fn progress_hook(py: Python<'_>, input: &Bound<'_, PyDict>) -> PyResult<()> {
-    // Only add the handler, if the log-level is higher than Debug (this avoids covering debug
-    // messages).
-    if log_enabled!(Level::Debug) {
-        return Ok(());
+impl std::fmt::Debug for YoutubeDL {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        // TODO(@bpeetz): Use something useful here. <2025-06-13>
+        f.write_str("YoutubeDL")
     }
+}
 
-    // ANSI ESCAPE CODES Wrappers {{{
-    // see: https://en.wikipedia.org/wiki/ANSI_escape_code#Control_Sequence_Introducer_commands
-    const CSI: &str = "\x1b[";
-    fn clear_whole_line() {
-        eprint!("{CSI}2K");
-    }
-    fn move_to_col(x: usize) {
-        eprint!("{CSI}{x}G");
-    }
-    // }}}
-
-    let input: Map<String, Value> = serde_json::from_str(&json_dumps(
-        py,
-        input
-            .downcast::<PyAny>()
-            .expect("Will always work")
-            .to_owned(),
-    )?)
-    .expect("python's json is valid");
-
-    macro_rules! get {
-        (@interrogate $item:ident, $type_fun:ident, $get_fun:ident, $name:expr) => {{
-            let a = $item.get($name).expect(concat!(
-                "The field '",
-                stringify!($name),
-                "' should exist."
-            ));
-
-            if a.$type_fun() {
-                a.$get_fun().expect(
-                    "The should have been checked in the if guard, so unpacking here is fine",
-                )
-            } else {
-                panic!(
-                    "Value {} => \n{}\n is not of type: {}",
-                    $name,
-                    a,
-                    stringify!($type_fun)
-                );
+impl YoutubeDL {
+    /// Construct this instance from options.
+    ///
+    /// # Panics
+    /// If `yt_dlp` changed their interface.
+    ///
+    /// # Errors
+    /// If a python call fails.
+    pub fn from_options(mut options: YoutubeDLOptions) -> Result<Self, build::Error> {
+        let mut settings = vm::Settings::default();
+        if let Ok(python_path) = env::var("PYTHONPATH") {
+            for path in python_path.split(':') {
+                settings.path_list.push(path.to_owned());
             }
-        }};
+        } else {
+            error!(
+                "No PYTHONPATH found or invalid utf8. \
+                This means, that you probably did not \
+                supply the yt_dlp!"
+            );
+        }
 
-        ($type_fun:ident, $get_fun:ident, $name1:expr, $name2:expr) => {{
-            let a = get! {@interrogate input, is_object, as_object, $name1};
-            let b = get! {@interrogate a, $type_fun, $get_fun, $name2};
-            b
-        }};
+        settings.install_signal_handlers = false;
 
-        ($type_fun:ident, $get_fun:ident, $name:expr) => {{
-            get! {@interrogate input, $type_fun, $get_fun, $name}
-        }};
-    }
+        // NOTE(@bpeetz): Another value leads to an internal codegen error. <2025-06-13>
+        settings.optimize = 0;
 
-    macro_rules! default_get {
-        (@interrogate $item:ident, $default:expr, $get_fun:ident, $name:expr) => {{
-            let a = if let Some(field) = $item.get($name) {
-                field.$get_fun().unwrap_or($default)
-            } else {
-                $default
-            };
-            a
-        }};
-
-        ($get_fun:ident, $default:expr, $name1:expr, $name2:expr) => {{
-            let a = get! {@interrogate input, is_object, as_object, $name1};
-            let b = default_get! {@interrogate a, $default, $get_fun, $name2};
-            b
-        }};
-
-        ($get_fun:ident, $default:expr, $name:expr) => {{
-            default_get! {@interrogate input, $default, $get_fun, $name}
-        }};
-    }
+        settings.isolated = true;
 
-    macro_rules! c {
-        ($color:expr, $format:expr) => {
-            format!("\x1b[{}m{}\x1b[0m", $color, $format)
-        };
-    }
+        let interpreter = InterpreterConfig::new()
+            .init_stdlib()
+            .settings(settings)
+            .interpreter();
 
-    fn format_bytes(bytes: u64) -> String {
-        let bytes = Bytes::new(bytes);
-        bytes.to_string()
-    }
+        let output_options = options.options.clone();
 
-    fn format_speed(speed: f64) -> String {
-        #[allow(clippy::cast_possible_truncation, clippy::cast_sign_loss)]
-        let bytes = Bytes::new(speed.floor() as u64);
-        format!("{bytes}/s")
-    }
+        let (yt_dlp_module, youtube_dl_class) = match interpreter.enter(|vm| {
+            let yt_dlp_module = vm.import("yt_dlp", 0)?;
+            let class = yt_dlp_module.get_attr("YoutubeDL", vm)?;
 
-    let get_title = || -> String {
-        match get! {is_string, as_str, "info_dict", "ext"} {
-            "vtt" => {
-                format!(
-                    "Subtitles ({})",
-                    default_get! {as_str, "<No Subtitle Language>", "info_dict", "name"}
-                )
+            let maybe_hook = mem::take(&mut options.progress_hook);
+            let opts = options.into_py_dict(vm);
+            if let Some(function) = maybe_hook {
+                opts.get_or_insert(vm, vm.new_pyobj("progress_hooks"), || {
+                    let hook: PyObjectRef = vm.new_function("progress_hook", function).into();
+                    vm.new_pyobj(vec![hook])
+                })
+                .expect("Should work?");
             }
-            "webm" | "mp4" | "mp3" | "m4a" => {
-                default_get! { as_str, "<No title>", "info_dict", "title"}.to_owned()
-            }
-            other => panic!("The extension '{other}' is not yet implemented"),
-        }
-    };
 
-    match get! {is_string, as_str, "status"} {
-        "downloading" => {
-            let elapsed = default_get! {as_f64, 0.0f64, "elapsed"};
-            let eta = default_get! {as_f64, 0.0, "eta"};
-            let speed = default_get! {as_f64, 0.0, "speed"};
-
-            let downloaded_bytes = get! {is_u64, as_u64, "downloaded_bytes"};
-            let (total_bytes, bytes_is_estimate): (u64, &'static str) = {
-                let total_bytes = default_get!(as_u64, 0, "total_bytes");
-                if total_bytes == 0 {
-                    let maybe_estimate = default_get!(as_u64, 0, "total_bytes_estimate");
-
-                    if maybe_estimate == 0 {
-                        // The download speed should be in bytes per second and the eta in seconds.
-                        // Thus multiplying them gets us the raw bytes (which were estimated by `yt_dlp`, from their `info.json`)
-                        let bytes_still_needed = (speed * eta).ceil() as u64;
-
-                        (downloaded_bytes + bytes_still_needed, "~")
-                    } else {
-                        (maybe_estimate, "~")
+            {
+                // Unconditionally set a logger.
+                // Otherwise, yt_dlp will log to stderr.
+
+                /// Is the specified record to be logged? Returns false for no,
+                /// true for yes. Filters can either modify log records in-place or
+                /// return a completely different record instance which will replace
+                /// the original log record in any future processing of the event.
+                fn filter_error_log(mut input: FuncArgs, vm: &VirtualMachine) -> bool {
+                    let record = input.args.remove(0);
+
+                    // Filter out all error logs (they are propagated as rust errors)
+                    let levelname: PyRef<PyStr> = record
+                        .get_attr("levelname", vm)
+                        .expect("This should exist")
+                        .downcast()
+                        .expect("This should be a String");
+
+                    let return_value = levelname.as_str() != "ERROR";
+
+                    if log_enabled!(Level::Debug) && !return_value {
+                        let message: String = {
+                            let get_message = record.get_attr("getMessage", vm).expect("Is set");
+                            let message: PyRef<PyStr> = get_message
+                                .call((), vm)
+                                .expect("Can be called")
+                                .downcast()
+                                .expect("Downcasting works");
+
+                            message.as_str().to_owned()
+                        };
+
+                        debug!("Swollowed error message: '{message}'");
                     }
-                } else {
-                    (total_bytes, "")
+                    return_value
                 }
-            };
-            let percent: f64 = {
-                if total_bytes == 0 {
-                    100.0
-                } else {
-                    (downloaded_bytes as f64 / total_bytes as f64) * 100.0
+
+                let logging = setup_logging(vm, "yt_dlp")?;
+                let ytdl_logger = {
+                    let get_logger = logging.get_item("getLogger", vm)?;
+                    get_logger.call(("yt_dlp",), vm)?
+                };
+
+                {
+                    let args = FuncArgs::new(
+                        PosArgs::new(vec![]),
+                        KwArgs::new({
+                            let mut map = IndexMap::new();
+                            // Ensure that all events are logged by setting
+                            // the log level to NOTSET (we filter on rust's side)
+                            map.insert("level".to_owned(), vm.new_pyobj(0));
+                            map
+                        }),
+                    );
+
+                    let basic_config = logging.get_item("basicConfig", vm)?;
+                    basic_config.call(args, vm)?;
                 }
-            };
 
-            clear_whole_line();
-            move_to_col(1);
-
-            eprint!(
-                "'{}' [{}/{} at {}] -> [{} of {}{} {}] ",
-                c!("34;1", get_title()),
-                c!("33;1", Duration::from(Some(elapsed))),
-                c!("33;1", Duration::from(Some(eta))),
-                c!("32;1", format_speed(speed)),
-                c!("31;1", format_bytes(downloaded_bytes)),
-                c!("31;1", bytes_is_estimate),
-                c!("31;1", format_bytes(total_bytes)),
-                c!("36;1", format!("{:.02}%", percent))
-            );
-            stderr().flush()?;
-        }
-        "finished" => {
-            eprintln!("-> Finished downloading.");
-        }
-        "error" => {
-            // TODO: This should probably return an Err. But I'm not so sure where the error would
-            // bubble up to (i.e., who would catch it) <2025-01-21>
-            eprintln!("-> Error while downloading: {}", get_title());
-            process::exit(1);
-        }
-        other => unreachable!("'{other}' should not be a valid state!"),
-    };
+                {
+                    let add_filter = ytdl_logger.get_attr("addFilter", vm)?;
+                    add_filter.call(
+                        (vm.new_function("yt_dlp_error_filter", filter_error_log),),
+                        vm,
+                    )?;
+                }
 
-    Ok(())
-}
+                opts.set_item("logger", ytdl_logger, vm)?;
+            }
 
-pub fn add_hooks<'a>(opts: Bound<'a, PyDict>, py: Python<'_>) -> PyResult<Bound<'a, PyDict>> {
-    if let Some(hooks) = opts.get_item("progress_hooks")? {
-        let hooks = hooks.downcast::<PyList>()?;
-        hooks.append(wrap_pyfunction!(progress_hook, py)?)?;
+            let youtube_dl_class = class.call((opts,), vm)?;
 
-        opts.set_item("progress_hooks", hooks)?;
-    } else {
-        // No hooks are set yet
-        let hooks_list = PyList::new(py, &[wrap_pyfunction!(progress_hook, py)?])?;
+            Ok::<_, PyRef<PyBaseException>>((yt_dlp_module, youtube_dl_class))
+        }) {
+            Ok(ok) => ok,
+            Err(err) => {
+                interpreter.finalize(Some(err));
+                return Err(build::Error::Python);
+            }
+        };
 
-        opts.set_item("progress_hooks", hooks_list)?;
+        Ok(Self {
+            interpreter,
+            youtube_dl_class,
+            yt_dlp_module,
+            options: output_options,
+        })
     }
 
-    Ok(opts)
-}
-
-/// Take the result of the ie (may be modified) and resolve all unresolved
-/// references (URLs, playlist items).
-///
-/// It will also download the videos if 'download'.
-/// Returns the resolved `ie_result`.
-#[allow(clippy::unused_async)]
-#[allow(clippy::missing_panics_doc)]
-pub async fn process_ie_result(
-    yt_dlp_opts: &Map<String, Value>,
-    ie_result: InfoJson,
-    download: bool,
-) -> Result<InfoJson, YtDlpError> {
-    Python::with_gil(|py| -> Result<InfoJson, YtDlpError> {
-        let opts = json_map_to_py_dict(yt_dlp_opts, py)?;
-
-        let instance = get_yt_dlp(py, opts)?;
-
-        let args = {
-            let ie_result = json_loads_str(py, ie_result)?;
-            (ie_result,)
-        };
+    /// # Panics
+    ///
+    /// If `yt_dlp` changed their location or type of `__version__`.
+    pub fn version(&self) -> String {
+        let str_ref: PyRef<PyStr> = self.interpreter.enter_and_expect(
+            |vm| {
+                let version_module = self.yt_dlp_module.get_attr("version", vm)?;
+                let version = version_module.get_attr("__version__", vm)?;
+                let version = version.downcast().expect("This should always be a string");
+                Ok(version)
+            },
+            "yt_dlp version location has changed",
+        );
+        str_ref.to_string()
+    }
 
-        let kwargs = PyDict::new(py);
-        kwargs.set_item("download", download)?;
+    /// Download a given list of URLs.
+    /// Returns the paths they were downloaded to.
+    ///
+    /// # Errors
+    /// If one of the downloads error.
+    pub fn download(&self, urls: &[Url]) -> Result<Vec<PathBuf>, extract_info::Error> {
+        let mut out_paths = Vec::with_capacity(urls.len());
+
+        for url in urls {
+            info!("Started downloading url: '{url}'");
+            let info_json = self.extract_info(url, true, true)?;
+
+            // Try to work around yt-dlp type weirdness
+            let result_string = if let Some(filename) = info_json.get("filename") {
+                PathBuf::from(json_cast!(filename, as_str))
+            } else {
+                PathBuf::from(json_get!(
+                    json_cast!(
+                        json_get!(info_json, "requested_downloads", as_array)[0],
+                        as_object
+                    ),
+                    "filename",
+                    as_str
+                ))
+            };
 
-        let result = instance
-            .call_method("process_ie_result", args, Some(&kwargs))?
-            .downcast_into::<PyDict>()
-            .expect("This is a dict");
+            out_paths.push(result_string);
+            info!("Finished downloading url");
+        }
 
-        let result_str = json_dumps(py, result.into_any())?;
+        Ok(out_paths)
+    }
 
-        serde_json::from_str(&result_str).map_err(Into::into)
-    })
-}
+    /// `extract_info(self, url, download=True, ie_key=None, extra_info=None, process=True, force_generic_extractor=False)`
+    ///
+    /// Extract and return the information dictionary of the URL
+    ///
+    /// Arguments:
+    /// - `url`          URL to extract
+    ///
+    /// Keyword arguments:
+    /// :`download`     Whether to download videos
+    /// :`process`      Whether to resolve all unresolved references (URLs, playlist items).
+    ///                 Must be True for download to work
+    ///
+    /// # Panics
+    /// If expectations about python fail to hold.
+    ///
+    /// # Errors
+    /// If python operations fail.
+    pub fn extract_info(
+        &self,
+        url: &Url,
+        download: bool,
+        process: bool,
+    ) -> Result<InfoJson, extract_info::Error> {
+        match self.interpreter.enter(|vm| {
+            let pos_args = PosArgs::new(vec![vm.new_pyobj(url.to_string())]);
+
+            let kw_args = KwArgs::new({
+                let mut map = IndexMap::new();
+                map.insert("download".to_owned(), vm.new_pyobj(download));
+                map.insert("process".to_owned(), vm.new_pyobj(process));
+                map
+            });
+
+            let fun_args = FuncArgs::new(pos_args, kw_args);
+
+            let inner = self.youtube_dl_class.get_attr("extract_info", vm)?;
+            let result = inner
+                .call_with_args(fun_args, vm)?
+                .downcast::<PyDict>()
+                .expect("This is a dict");
+
+            // Resolve the generator object
+            if let Ok(generator) = result.get_item("entries", vm) {
+                if generator.payload_is::<PyList>() {
+                    // already resolved. Do nothing
+                } else {
+                    let max_backlog = self.options.get("playlistend").map_or(10000, |value| {
+                        usize::try_from(value.as_u64().expect("Works")).expect("Should work")
+                    });
+
+                    let mut out = vec![];
+                    let next = generator.get_attr("__next__", vm)?;
+                    while let Ok(output) = next.call((), vm) {
+                        out.push(output);
+
+                        if out.len() == max_backlog {
+                            break;
+                        }
+                    }
+                    result.set_item("entries", vm.new_pyobj(out), vm)?;
+                }
+            }
 
-/// `extract_info(self, url, download=True, ie_key=None, extra_info=None, process=True, force_generic_extractor=False)`
-///
-/// Extract and return the information dictionary of the URL
-///
-/// Arguments:
-/// @param url          URL to extract
-///
-/// Keyword arguments:
-/// @param download     Whether to download videos
-/// @param process      Whether to resolve all unresolved references (URLs, playlist items).
-///                     Must be True for download to work
-/// @param `ie_key`       Use only the extractor with this key
-///
-/// @param `extra_info`   Dictionary containing the extra values to add to the info (For internal use only)
-/// @`force_generic_extractor`  Force using the generic extractor (Deprecated; use `ie_key`='Generic')
-#[allow(clippy::unused_async)]
-#[allow(clippy::missing_panics_doc)]
-pub async fn extract_info(
-    yt_dlp_opts: &Map<String, Value>,
-    url: &Url,
-    download: bool,
-    process: bool,
-) -> Result<InfoJson, YtDlpError> {
-    Python::with_gil(|py| -> Result<InfoJson, YtDlpError> {
-        let opts = json_map_to_py_dict(yt_dlp_opts, py)?;
-
-        let instance = get_yt_dlp(py, opts)?;
-        let args = (url.as_str(),);
-
-        let kwargs = PyDict::new(py);
-        kwargs.set_item("download", download)?;
-        kwargs.set_item("process", process)?;
-
-        let result = instance
-            .call_method("extract_info", args, Some(&kwargs))?
-            .downcast_into::<PyDict>()
-            .expect("This is a dict");
-
-        // Resolve the generator object
-        if let Some(generator) = result.get_item("entries")? {
-            if generator.is_instance_of::<PyList>() {
-                // already resolved. Do nothing
-            } else {
-                let max_backlog = yt_dlp_opts.get("playlistend").map_or(10000, |value| {
-                    usize::try_from(value.as_u64().expect("Works")).expect("Should work")
-                });
+            let result = {
+                let sanitize = self.youtube_dl_class.get_attr("sanitize_info", vm)?;
+                let value = sanitize.call((result,), vm)?;
 
-                let mut out = vec![];
-                while let Ok(output) = generator.call_method0("__next__") {
-                    out.push(output);
+                value.downcast::<PyDict>().expect("This should stay a dict")
+            };
 
-                    if out.len() == max_backlog {
-                        break;
-                    }
+            let result_json = json_dumps(result, vm);
+
+            if let Ok(confirm) = env::var("YT_STORE_INFO_JSON") {
+                if confirm == "yes" {
+                    let mut file = File::create("output.info.json").unwrap();
+                    write!(
+                        file,
+                        "{}",
+                        serde_json::to_string_pretty(&serde_json::Value::Object(
+                            result_json.clone()
+                        ))
+                        .expect("Valid json")
+                    )
+                    .unwrap();
                 }
-                result.set_item("entries", out)?;
+            }
+
+            Ok::<_, PyRef<PyBaseException>>(result_json)
+        }) {
+            Ok(ok) => Ok(ok),
+            Err(err) => {
+                self.interpreter.enter(|vm| {
+                    vm.print_exception(err);
+                });
+                Err(extract_info::Error::Python)
             }
         }
+    }
+
+    /// Take the (potentially modified) result of the information extractor (i.e.,
+    /// [`Self::extract_info`] with `process` and `download` set to false)
+    /// and resolve all unresolved references (URLs,
+    /// playlist items).
+    ///
+    /// It will also download the videos if 'download' is true.
+    /// Returns the resolved `ie_result`.
+    ///
+    /// # Panics
+    /// If expectations about python fail to hold.
+    ///
+    /// # Errors
+    /// If python operations fail.
+    pub fn process_ie_result(
+        &self,
+        ie_result: InfoJson,
+        download: bool,
+    ) -> Result<InfoJson, process_ie_result::Error> {
+        match self.interpreter.enter(|vm| {
+            let pos_args = PosArgs::new(vec![vm.new_pyobj(json_loads(ie_result, vm))]);
+
+            let kw_args = KwArgs::new({
+                let mut map = IndexMap::new();
+                map.insert("download".to_owned(), vm.new_pyobj(download));
+                map
+            });
+
+            let fun_args = FuncArgs::new(pos_args, kw_args);
+
+            let inner = self.youtube_dl_class.get_attr("process_ie_result", vm)?;
+            let result = inner
+                .call_with_args(fun_args, vm)?
+                .downcast::<PyDict>()
+                .expect("This is a dict");
+
+            let result = {
+                let sanitize = self.youtube_dl_class.get_attr("sanitize_info", vm)?;
+                let value = sanitize.call((result,), vm)?;
+
+                value.downcast::<PyDict>().expect("This should stay a dict")
+            };
 
-        let result_str = json_dumps(py, result.into_any())?;
+            let result_json = json_dumps(result, vm);
 
-        if let Ok(confirm) = env::var("YT_STORE_INFO_JSON") {
-            if confirm == "yes" {
-                let mut file = File::create("output.info.json")?;
-                write!(file, "{result_str}").unwrap();
+            Ok::<_, PyRef<PyBaseException>>(result_json)
+        }) {
+            Ok(ok) => Ok(ok),
+            Err(err) => {
+                self.interpreter.enter(|vm| {
+                    vm.print_exception(err);
+                });
+                Err(process_ie_result::Error::Python)
             }
         }
-
-        serde_json::from_str(&result_str).map_err(Into::into)
-    })
+    }
 }
 
-/// # Panics
-/// Only if python fails to return a valid URL.
-pub fn unsmuggle_url(smug_url: &Url) -> PyResult<Url> {
-    Python::with_gil(|py| {
-        let utils = get_yt_dlp_utils(py)?;
-        let url = utils
-            .call_method1("unsmuggle_url", (smug_url.as_str(),))?
-            .downcast::<PyTuple>()?
-            .get_item(0)?;
-
-        let url: Url = url
-            .downcast::<PyString>()?
-            .to_string()
-            .parse()
-            .expect("Python should be able to return a valid url");
-
-        Ok(url)
-    })
+#[allow(missing_docs)]
+pub mod process_ie_result {
+    #[derive(Debug, thiserror::Error, Clone, Copy)]
+    pub enum Error {
+        #[error("Python threw an exception")]
+        Python,
+    }
 }
-
-/// Download a given list of URLs.
-/// Returns the paths they were downloaded to.
-///
-/// # Panics
-/// Only if `yt_dlp` changes their `info_json` schema.
-pub async fn download(
-    urls: &[Url],
-    download_options: &Map<String, Value>,
-) -> Result<Vec<PathBuf>, YtDlpError> {
-    let mut out_paths = Vec::with_capacity(urls.len());
-
-    for url in urls {
-        info!("Started downloading url: '{}'", url);
-        let info_json = extract_info(download_options, url, true, true).await?;
-
-        // Try to work around yt-dlp type weirdness
-        let result_string = if let Some(filename) = info_json.filename {
-            filename
-        } else {
-            info_json.requested_downloads.expect("This must exist")[0]
-                .filename
-                .clone()
-        };
-
-        out_paths.push(result_string);
-        info!("Finished downloading url: '{}'", url);
+#[allow(missing_docs)]
+pub mod extract_info {
+    #[derive(Debug, thiserror::Error, Clone, Copy)]
+    pub enum Error {
+        #[error("Python threw an exception")]
+        Python,
     }
-
-    Ok(out_paths)
 }
 
-fn json_map_to_py_dict<'a>(
-    map: &Map<String, Value>,
-    py: Python<'a>,
-) -> PyResult<Bound<'a, PyDict>> {
-    let json_string = serde_json::to_string(&map).expect("This must always work");
+pub type InfoJson = serde_json::Map<String, serde_json::Value>;
+pub type ProgressHookFunction = fn(input: FuncArgs, vm: &VirtualMachine);
 
-    let python_dict = json_loads(py, json_string)?;
-
-    Ok(python_dict)
+/// Options, that are used to customize the download behaviour.
+///
+/// In the future, this might get a Builder api.
+///
+/// See `help(yt_dlp.YoutubeDL())` from python for a full list of available options.
+#[derive(Default, Debug)]
+pub struct YoutubeDLOptions {
+    options: serde_json::Map<String, serde_json::Value>,
+    progress_hook: Option<ProgressHookFunction>,
 }
 
-fn json_dumps(py: Python<'_>, input: Bound<'_, PyAny>) -> PyResult<String> {
-    //     json.dumps(yt_dlp.sanitize_info(input))
+impl YoutubeDLOptions {
+    #[must_use]
+    pub fn new() -> Self {
+        Self {
+            options: serde_json::Map::new(),
+            progress_hook: None,
+        }
+    }
 
-    let yt_dlp = get_yt_dlp(py, PyDict::new(py))?;
-    let sanitized_result = yt_dlp.call_method1("sanitize_info", (input,))?;
+    #[must_use]
+    pub fn set(self, key: impl Into<String>, value: impl Into<serde_json::Value>) -> Self {
+        let mut options = self.options;
+        options.insert(key.into(), value.into());
 
-    let json = PyModule::import(py, "json")?;
-    let dumps = json.getattr("dumps")?;
+        Self {
+            options,
+            progress_hook: self.progress_hook,
+        }
+    }
 
-    let output = dumps.call1((sanitized_result,))?;
+    #[must_use]
+    pub fn with_progress_hook(self, progress_hook: ProgressHookFunction) -> Self {
+        if let Some(_previous_hook) = self.progress_hook {
+            todo!()
+        } else {
+            Self {
+                options: self.options,
+                progress_hook: Some(progress_hook),
+            }
+        }
+    }
 
-    let output_str = output.extract::<String>()?;
+    /// # Errors
+    /// If the underlying [`YoutubeDL::from_options`] errors.
+    pub fn build(self) -> Result<YoutubeDL, build::Error> {
+        YoutubeDL::from_options(self)
+    }
 
-    Ok(output_str)
-}
+    #[must_use]
+    pub fn from_json_options(options: serde_json::Map<String, serde_json::Value>) -> Self {
+        Self {
+            options,
+            progress_hook: None,
+        }
+    }
 
-fn json_loads_str<T: Serialize>(py: Python<'_>, input: T) -> PyResult<Bound<'_, PyDict>> {
-    let string = serde_json::to_string(&input).expect("Correct json must be pased");
+    #[must_use]
+    pub fn get(&self, key: &str) -> Option<&serde_json::Value> {
+        self.options.get(key)
+    }
 
-    json_loads(py, string)
+    fn into_py_dict(self, vm: &VirtualMachine) -> PyRef<PyDict> {
+        json_loads(self.options, vm)
+    }
 }
 
-fn json_loads(py: Python<'_>, input: String) -> PyResult<Bound<'_, PyDict>> {
-    //     json.loads(input)
-
-    let json = PyModule::import(py, "json")?;
-    let dumps = json.getattr("loads")?;
+#[allow(missing_docs)]
+pub mod build {
+    #[derive(Debug, thiserror::Error)]
+    pub enum Error {
+        #[error("Python threw an exception")]
+        Python,
 
-    let output = dumps.call1((input,))?;
-
-    Ok(output
-        .downcast::<PyDict>()
-        .expect("This should always be a PyDict")
-        .clone())
+        #[error("Io error: {0}")]
+        Io(#[from] std::io::Error),
+    }
 }
 
-fn get_yt_dlp_utils(py: Python<'_>) -> PyResult<Bound<'_, PyAny>> {
-    let yt_dlp = PyModule::import(py, "yt_dlp")?;
-    let utils = yt_dlp.getattr("utils")?;
-
-    Ok(utils)
+fn json_loads(
+    input: serde_json::Map<String, serde_json::Value>,
+    vm: &VirtualMachine,
+) -> PyRef<PyDict> {
+    let json = vm.import("json", 0).expect("Module exists");
+    let loads = json.get_attr("loads", vm).expect("Method exists");
+    let self_str = serde_json::to_string(&serde_json::Value::Object(input)).expect("Vaild json");
+    let dict = loads
+        .call((self_str,), vm)
+        .expect("Vaild json is always a valid dict");
+
+    dict.downcast().expect("Should always be a dict")
 }
-fn get_yt_dlp<'a>(py: Python<'a>, opts: Bound<'a, PyDict>) -> PyResult<Bound<'a, PyAny>> {
-    // Unconditionally set a logger
-    let opts = add_logger_and_sig_handler(opts, py)?;
-    let opts = add_hooks(opts, py)?;
 
-    let yt_dlp = PyModule::import(py, "yt_dlp")?;
-    let youtube_dl = yt_dlp.call_method1("YoutubeDL", (opts,))?;
-
-    Ok(youtube_dl)
+/// # Panics
+/// If expectation about python operations fail.
+pub fn json_dumps(
+    input: PyRef<PyDict>,
+    vm: &VirtualMachine,
+) -> serde_json::Map<String, serde_json::Value> {
+    let json = vm.import("json", 0).expect("Module exists");
+    let dumps = json.get_attr("dumps", vm).expect("Method exists");
+    let dict = dumps
+        .call((input,), vm)
+        .map_err(|err| vm.print_exception(err))
+        .expect("Might not always work, but for our dicts it works");
+
+    let string: PyRef<PyStr> = dict.downcast().expect("Should always be a string");
+
+    let real_string = string.to_str().expect("Should be valid utf8");
+
+    // {
+    //     let mut file = File::create("debug.dump.json").unwrap();
+    //     write!(file, "{}", real_string).unwrap();
+    // }
+
+    let value: serde_json::Value = serde_json::from_str(real_string).expect("Should be valid json");
+
+    match value {
+        serde_json::Value::Object(map) => map,
+        _ => unreachable!("These should not be json.dumps output"),
+    }
 }
diff --git a/crates/yt_dlp/src/logging.rs b/crates/yt_dlp/src/logging.rs
index e731502..5cb4c1d 100644
--- a/crates/yt_dlp/src/logging.rs
+++ b/crates/yt_dlp/src/logging.rs
@@ -10,34 +10,66 @@
 
 // This file is taken from: https://github.com/dylanbstorey/pyo3-pylogger/blob/d89e0d6820ebc4f067647e3b74af59dbc4941dd5/src/lib.rs
 // It is licensed under the Apache 2.0 License, copyright up to 2024 by Dylan Storey
-// It was modified by Benedikt Peetz 2024
-
-// The pyo3 `pyfunction` proc-macros call unsafe functions internally, which trigger this lint.
-#![allow(unsafe_op_in_unsafe_fn)]
-
-use std::ffi::CString;
+// It was modified by Benedikt Peetz 2024, 2025
 
 use log::{Level, MetadataBuilder, Record, logger};
-use pyo3::{
-    Bound, PyAny, PyResult, Python,
-    prelude::{PyAnyMethods, PyListMethods, PyModuleMethods},
-    pyfunction, wrap_pyfunction,
+use rustpython::vm::{
+    PyObjectRef, PyRef, PyResult, VirtualMachine,
+    builtins::{PyInt, PyList, PyStr},
+    convert::ToPyObject,
+    function::FuncArgs,
 };
 
 /// Consume a Python `logging.LogRecord` and emit a Rust `Log` instead.
-#[allow(clippy::needless_pass_by_value)]
-#[pyfunction]
-fn host_log(record: Bound<'_, PyAny>, rust_target: &str) -> PyResult<()> {
-    let level = record.getattr("levelno")?;
-    let message = record.getattr("getMessage")?.call0()?.to_string();
-    let pathname = record.getattr("pathname")?.to_string();
-    let lineno = record
-        .getattr("lineno")?
-        .to_string()
-        .parse::<u32>()
-        .expect("This should always be a u32");
-
-    let logger_name = record.getattr("name")?.to_string();
+fn host_log(mut input: FuncArgs, vm: &VirtualMachine) -> PyResult<()> {
+    let record = input.args.remove(0);
+    let rust_target = {
+        let base: PyRef<PyStr> = input.args.remove(0).downcast().expect("Should be a string");
+        base.as_str().to_owned()
+    };
+
+    let level = {
+        let level: PyRef<PyInt> = record
+            .get_attr("levelno", vm)?
+            .downcast()
+            .expect("Should always be an int");
+        level.as_u32_mask()
+    };
+    let message = {
+        let get_message = record.get_attr("getMessage", vm)?;
+        let message: PyRef<PyStr> = get_message
+            .call((), vm)?
+            .downcast()
+            .expect("Downcasting works");
+
+        message.as_str().to_owned()
+    };
+
+    let pathname = {
+        let pathname: PyRef<PyStr> = record
+            .get_attr("pathname", vm)?
+            .downcast()
+            .expect("Is a string");
+
+        pathname.as_str().to_owned()
+    };
+
+    let lineno = {
+        let lineno: PyRef<PyInt> = record
+            .get_attr("lineno", vm)?
+            .downcast()
+            .expect("Is a number");
+
+        lineno.as_u32_mask()
+    };
+
+    let logger_name = {
+        let name: PyRef<PyStr> = record
+            .get_attr("name", vm)?
+            .downcast()
+            .expect("Should be a string");
+        name.as_str().to_owned()
+    };
 
     let full_target: Option<String> = if logger_name.trim().is_empty() || logger_name == "root" {
         None
@@ -48,25 +80,25 @@ fn host_log(record: Bound<'_, PyAny>, rust_target: &str) -> PyResult<()> {
         Some(format!("{rust_target}::{logger_name}"))
     };
 
-    let target = full_target.as_deref().unwrap_or(rust_target);
+    let target = full_target.as_deref().unwrap_or(&rust_target);
 
     // error
-    let error_metadata = if level.ge(40u8)? {
+    let error_metadata = if level >= 40 {
         MetadataBuilder::new()
             .target(target)
             .level(Level::Error)
             .build()
-    } else if level.ge(30u8)? {
+    } else if level >= 30 {
         MetadataBuilder::new()
             .target(target)
             .level(Level::Warn)
             .build()
-    } else if level.ge(20u8)? {
+    } else if level >= 20 {
         MetadataBuilder::new()
             .target(target)
             .level(Level::Info)
             .build()
-    } else if level.ge(10u8)? {
+    } else if level >= 10 {
         MetadataBuilder::new()
             .target(target)
             .level(Level::Debug)
@@ -98,13 +130,24 @@ fn host_log(record: Bound<'_, PyAny>, rust_target: &str) -> PyResult<()> {
 /// # Panics
 /// Only if internal assertions fail.
 #[allow(clippy::module_name_repetitions)]
-pub fn setup_logging(py: Python<'_>, target: &str) -> PyResult<()> {
-    let logging = py.import("logging")?;
+pub(super) fn setup_logging(vm: &VirtualMachine, target: &str) -> PyResult<PyObjectRef> {
+    let logging = vm.import("logging", 0)?;
 
-    logging.setattr("host_log", wrap_pyfunction!(host_log, &logging)?)?;
+    let scope = vm.new_scope_with_builtins();
 
-    py.run(
-        CString::new(format!(
+    for (key, value) in logging.dict().expect("Should be a dict") {
+        let key: PyRef<PyStr> = key.downcast().expect("Is a string");
+
+        scope.globals.set_item(key.as_str(), value, vm)?;
+    }
+    scope
+        .globals
+        .set_item("host_log", vm.new_function("host_log", host_log).into(), vm)?;
+
+    let local_scope = scope.clone();
+    vm.run_code_string(
+        local_scope,
+        format!(
             r#"
 class HostHandler(Handler):
     def __init__(self, level=0):
@@ -119,15 +162,36 @@ def basicConfig(*pargs, **kwargs):
         kwargs["handlers"] = [HostHandler()]
     return oldBasicConfig(*pargs, **kwargs)
 "#
-        ))
-        .expect("This is hardcoded")
-        .as_c_str(),
-        Some(&logging.dict()),
-        None,
+        )
+        .as_str(),
+        "<embedded logging inintializing code>".to_owned(),
     )?;
 
-    let all = logging.index()?;
-    all.append("HostHandler")?;
-
-    Ok(())
+    let all: PyRef<PyList> = logging
+        .get_attr("__all__", vm)?
+        .downcast()
+        .expect("Is a list");
+    all.borrow_vec_mut().push(vm.new_pyobj("HostHandler"));
+
+    // {
+    //     let logging_dict = logging.dict().expect("Exists");
+    //
+    //     for (key, val) in scope.globals {
+    //         let key: PyRef<PyStr> = key.downcast().expect("Is a string");
+    //
+    //         if !logging_dict.contains_key(key.as_str(), vm) {
+    //             logging_dict.set_item(key.as_str(), val, vm)?;
+    //         }
+    //     }
+    //
+    //     for (key, val) in scope.locals {
+    //         let key: PyRef<PyStr> = key.downcast().expect("Is a string");
+    //
+    //         if !logging_dict.contains_key(key.as_str(), vm) {
+    //             logging_dict.set_item(key.as_str(), val, vm)?;
+    //         }
+    //     }
+    // }
+
+    Ok(scope.globals.to_pyobject(vm))
 }
diff --git a/crates/yt_dlp/src/progress_hook.rs b/crates/yt_dlp/src/progress_hook.rs
new file mode 100644
index 0000000..7a7628a
--- /dev/null
+++ b/crates/yt_dlp/src/progress_hook.rs
@@ -0,0 +1,41 @@
+#[macro_export]
+macro_rules! mk_python_function {
+    ($name:ident, $new_name:ident) => {
+        pub fn $new_name(
+            mut args: $crate::progress_hook::rustpython::vm::function::FuncArgs,
+            vm: &$crate::progress_hook::rustpython::vm::VirtualMachine,
+        ) {
+            use $crate::progress_hook::rustpython;
+
+            let input = {
+                let dict: rustpython::vm::PyRef<rustpython::vm::builtins::PyDict> = args
+                    .args
+                    .remove(0)
+                    .downcast()
+                    .expect("The progress hook is always called with these args");
+                let new_dict = rustpython::vm::builtins::PyDict::new_ref(&vm.ctx);
+                dict.into_iter()
+                    .filter_map(|(name, value)| {
+                        let real_name: rustpython::vm::PyRefExact<rustpython::vm::builtins::PyStr> =
+                            name.downcast_exact(vm).expect("Is a string");
+                        let name_str = real_name.to_str().expect("Is a string");
+                        if name_str.starts_with('_') {
+                            None
+                        } else {
+                            Some((name_str.to_owned(), value))
+                        }
+                    })
+                    .for_each(|(key, value)| {
+                        new_dict
+                            .set_item(&key, value, vm)
+                            .expect("This is a transpositions, should always be valid");
+                    });
+
+                $crate::json_dumps(new_dict, vm)
+            };
+            $name(input).expect("Shall not fail!");
+        }
+    };
+}
+
+pub use rustpython;
diff --git a/crates/yt_dlp/src/python_json_decode_failed.error_msg b/crates/yt_dlp/src/python_json_decode_failed.error_msg
deleted file mode 100644
index d10688e..0000000
--- a/crates/yt_dlp/src/python_json_decode_failed.error_msg
+++ /dev/null
@@ -1,5 +0,0 @@
-Failed to decode yt-dlp's response: {}
-
-This is probably a bug.
-Try running the command again with the `YT_STORE_INFO_JSON=yes` environment variable set
-and maybe debug it further via `yt check info-json output.info.json`.
diff --git a/crates/yt_dlp/src/python_json_decode_failed.error_msg.license b/crates/yt_dlp/src/python_json_decode_failed.error_msg.license
deleted file mode 100644
index 7813eb6..0000000
--- a/crates/yt_dlp/src/python_json_decode_failed.error_msg.license
+++ /dev/null
@@ -1,9 +0,0 @@
-yt - A fully featured command line YouTube client
-
-Copyright (C) 2025 Benedikt Peetz <benedikt.peetz@b-peetz.de>
-SPDX-License-Identifier: GPL-3.0-or-later
-
-This file is part of Yt.
-
-You should have received a copy of the License along with this program.
-If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
diff --git a/crates/yt_dlp/src/tests.rs b/crates/yt_dlp/src/tests.rs
deleted file mode 100644
index 91b6626..0000000
--- a/crates/yt_dlp/src/tests.rs
+++ /dev/null
@@ -1,89 +0,0 @@
-// yt - A fully featured command line YouTube client
-//
-// Copyright (C) 2024 Benedikt Peetz <benedikt.peetz@b-peetz.de>
-// SPDX-License-Identifier: GPL-3.0-or-later
-//
-// This file is part of Yt.
-//
-// You should have received a copy of the License along with this program.
-// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
-
-use std::sync::LazyLock;
-
-use serde_json::{Value, json};
-use url::Url;
-
-static YT_OPTS: LazyLock<serde_json::Map<String, Value>> = LazyLock::new(|| {
-    match json!({
-        "playliststart": 1,
-        "playlistend": 10,
-        "noplaylist": false,
-        "extract_flat": false,
-    }) {
-        Value::Object(obj) => obj,
-        _ => unreachable!("This json is hardcoded"),
-    }
-});
-
-#[tokio::test]
-#[ignore = "This test hangs forever"]
-async fn test_extract_info_video() {
-    let info = crate::extract_info(
-        &YT_OPTS,
-        &Url::parse("https://www.youtube.com/watch?v=dbjPnXaacAU").expect("Is valid."),
-        false,
-        false,
-    )
-    .await
-    .map_err(|err| format!("Encountered error: '{err}'"))
-    .unwrap();
-
-    println!("{info:#?}");
-}
-
-#[tokio::test]
-#[ignore = "This test hangs forever"]
-async fn test_extract_info_url() {
-    let err = crate::extract_info(
-        &YT_OPTS,
-        &Url::parse("https://google.com").expect("Is valid."),
-        false,
-        false,
-    )
-    .await
-    .map_err(|err| format!("Encountered error: '{err}'"))
-    .unwrap();
-
-    println!("{err:#?}");
-}
-
-#[tokio::test]
-#[ignore = "This test hangs forever"]
-async fn test_extract_info_playlist() {
-    let err = crate::extract_info(
-        &YT_OPTS,
-        &Url::parse("https://www.youtube.com/@TheGarriFrischer/videos").expect("Is valid."),
-        false,
-        true,
-    )
-    .await
-    .map_err(|err| format!("Encountered error: '{err}'"))
-    .unwrap();
-
-    println!("{err:#?}");
-}
-#[tokio::test]
-#[ignore = "This test hangs forever"]
-async fn test_extract_info_playlist_full() {
-    let err = crate::extract_info(
-        &YT_OPTS,
-        &Url::parse("https://www.youtube.com/@NixOS-Foundation/videos").expect("Is valid."),
-        false,
-        true,
-    )
-    .await
-    .map_err(|err| format!("Encountered error: '{err}'"))
-    .unwrap();
-
-    println!("{err:#?}");
-}
diff --git a/crates/yt_dlp/src/wrapper/info_json.rs b/crates/yt_dlp/src/wrapper/info_json.rs
deleted file mode 100644
index a2c00df..0000000
--- a/crates/yt_dlp/src/wrapper/info_json.rs
+++ /dev/null
@@ -1,824 +0,0 @@
-// yt - A fully featured command line YouTube client
-//
-// Copyright (C) 2024 Benedikt Peetz <benedikt.peetz@b-peetz.de>
-// SPDX-License-Identifier: GPL-3.0-or-later
-//
-// This file is part of Yt.
-//
-// You should have received a copy of the License along with this program.
-// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
-
-// `yt_dlp` named them like this.
-#![allow(clippy::pub_underscore_fields)]
-
-use std::{collections::HashMap, path::PathBuf};
-
-use pyo3::{Bound, PyResult, Python, types::PyDict};
-use serde::{Deserialize, Deserializer, Serialize};
-use serde_json::Value;
-use url::Url;
-
-use crate::json_loads_str;
-
-type Todo = String;
-type Extractor = String;
-type ExtractorKey = String;
-
-// TODO: Change this to map `_type` to a structure of values, instead of the options <2024-05-27>
-// And replace all the strings with better types (enums or urls)
-#[derive(Debug, Deserialize, Serialize, PartialEq)]
-#[serde(deny_unknown_fields)]
-pub struct InfoJson {
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub __files_to_move: Option<FilesToMove>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub __last_playlist_index: Option<u32>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub __post_extractor: Option<String>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub __x_forwarded_for_ip: Option<String>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub _filename: Option<PathBuf>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub _format_sort_fields: Option<Vec<String>>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub _has_drm: Option<Todo>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub _type: Option<InfoType>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub _version: Option<Version>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub abr: Option<f64>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub acodec: Option<String>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub age_limit: Option<u32>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub artists: Option<Todo>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub aspect_ratio: Option<f64>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub asr: Option<u32>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub audio_channels: Option<u32>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub audio_ext: Option<String>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub automatic_captions: Option<HashMap<String, Vec<Caption>>>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub availability: Option<String>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub average_rating: Option<String>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub categories: Option<Vec<String>>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub channel: Option<String>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub channel_follower_count: Option<u32>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub channel_id: Option<String>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub channel_is_verified: Option<bool>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub channel_url: Option<String>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub chapters: Option<Vec<Chapter>>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub comment_count: Option<u32>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub comments: Option<Vec<Comment>>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub concurrent_view_count: Option<u32>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub container: Option<Todo>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub description: Option<String>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub direct: Option<bool>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub display_id: Option<String>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub downloader_options: Option<DownloaderOptions>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub duration: Option<f64>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub duration_string: Option<String>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub dynamic_range: Option<String>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub entries: Option<Vec<InfoJson>>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub episode: Option<String>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub episode_number: Option<u32>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub epoch: Option<u32>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub ext: Option<String>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub extractor: Option<Extractor>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub extractor_key: Option<ExtractorKey>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub filename: Option<PathBuf>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub filesize: Option<u64>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub filesize_approx: Option<u64>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub format: Option<String>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub format_id: Option<String>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub format_index: Option<u32>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub format_note: Option<String>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub formats: Option<Vec<Format>>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub fps: Option<f64>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub fulltitle: Option<String>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub genre: Option<String>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub genres: Option<Vec<String>>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub has_drm: Option<bool>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub heatmap: Option<Vec<HeatMapEntry>>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub height: Option<u32>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub hls_aes: Option<Todo>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub http_headers: Option<HttpHeader>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub id: Option<String>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub ie_key: Option<ExtractorKey>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub is_live: Option<bool>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub language: Option<String>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub language_preference: Option<i32>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub license: Option<Todo>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub like_count: Option<u32>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub live_status: Option<String>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub location: Option<Todo>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub manifest_url: Option<Url>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub media_type: Option<Todo>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub modified_date: Option<String>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub n_entries: Option<u32>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub original_url: Option<String>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub playable_in_embed: Option<bool>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub playlist: Option<Todo>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub playlist_autonumber: Option<u32>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub playlist_channel: Option<Todo>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub playlist_channel_id: Option<Todo>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub playlist_count: Option<u32>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub playlist_id: Option<Todo>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub playlist_index: Option<u64>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub playlist_title: Option<Todo>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub playlist_uploader: Option<Todo>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub playlist_uploader_id: Option<Todo>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub playlist_webpage_url: Option<Url>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub preference: Option<Todo>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub protocol: Option<String>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub quality: Option<f64>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub release_date: Option<String>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub release_timestamp: Option<u64>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub release_year: Option<u32>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub repost_count: Option<u32>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub requested_downloads: Option<Vec<RequestedDownloads>>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub requested_entries: Option<Vec<u32>>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub requested_formats: Option<Vec<Format>>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub requested_subtitles: Option<HashMap<String, Subtitle>>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub resolution: Option<String>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub season: Option<String>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub season_number: Option<u32>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub series: Option<String>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub source_preference: Option<i32>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub sponsorblock_chapters: Option<Vec<SponsorblockChapter>>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub stretched_ratio: Option<Todo>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub subtitles: Option<HashMap<String, Vec<Caption>>>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub tags: Option<Vec<String>>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub tbr: Option<f64>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub thumbnail: Option<Url>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub thumbnails: Option<Vec<ThumbNail>>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub timestamp: Option<f64>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub title: Option<String>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub upload_date: Option<String>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub uploader: Option<String>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub uploader_id: Option<String>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub uploader_url: Option<String>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub url: Option<Url>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub vbr: Option<f64>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub vcodec: Option<String>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub video_ext: Option<String>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub view_count: Option<u32>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub was_live: Option<bool>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub webpage_url: Option<Url>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub webpage_url_basename: Option<String>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub webpage_url_domain: Option<String>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub width: Option<u32>,
-}
-
-#[derive(Debug, Deserialize, Serialize, PartialEq)]
-#[serde(deny_unknown_fields)]
-#[allow(missing_copy_implementations)]
-pub struct FilesToMove {}
-
-#[derive(Debug, Deserialize, Serialize, PartialEq)]
-#[serde(deny_unknown_fields)]
-pub struct RequestedDownloads {
-    pub __files_to_merge: Option<Vec<Todo>>,
-    pub __finaldir: PathBuf,
-    pub __infojson_filename: PathBuf,
-    pub __postprocessors: Vec<Todo>,
-    pub __real_download: bool,
-    pub __write_download_archive: bool,
-    pub _filename: PathBuf,
-    pub _type: InfoType,
-    pub _version: Version,
-    pub abr: f64,
-    pub acodec: String,
-    pub aspect_ratio: Option<f64>,
-    pub asr: Option<u32>,
-    pub audio_channels: Option<u32>,
-    pub audio_ext: Option<String>,
-    pub chapters: Option<Vec<SponsorblockChapter>>,
-    pub duration: Option<f64>,
-    pub dynamic_range: Option<String>,
-    pub ext: String,
-    pub filename: PathBuf,
-    pub filepath: PathBuf,
-    pub filesize_approx: Option<u64>,
-    pub format: String,
-    pub format_id: String,
-    pub format_note: Option<String>,
-    pub fps: Option<f64>,
-    pub has_drm: Option<bool>,
-    pub height: Option<u32>,
-    pub http_headers: Option<HttpHeader>,
-    pub infojson_filename: PathBuf,
-    pub language: Option<String>,
-    pub manifest_url: Option<Url>,
-    pub protocol: String,
-    pub quality: Option<i64>,
-    pub requested_formats: Option<Vec<Format>>,
-    pub resolution: String,
-    pub tbr: f64,
-    pub url: Option<Url>,
-    pub vbr: f64,
-    pub vcodec: String,
-    pub video_ext: Option<String>,
-    pub width: Option<u32>,
-}
-
-#[derive(Debug, Deserialize, Serialize, PartialEq, PartialOrd)]
-#[serde(deny_unknown_fields)]
-pub struct Subtitle {
-    pub ext: SubtitleExt,
-    pub filepath: PathBuf,
-    pub filesize: Option<u64>,
-    pub fragment_base_url: Option<Url>,
-    pub fragments: Option<Vec<Fragment>>,
-    pub manifest_url: Option<Url>,
-    pub name: Option<String>,
-    pub protocol: Option<Todo>,
-    pub url: Url,
-}
-
-#[derive(Debug, Deserialize, Serialize, PartialEq, PartialOrd, Ord, Eq, Clone, Copy)]
-pub enum SubtitleExt {
-    #[serde(alias = "vtt")]
-    Vtt,
-
-    #[serde(alias = "mp4")]
-    Mp4,
-
-    #[serde(alias = "json")]
-    Json,
-    #[serde(alias = "json3")]
-    Json3,
-
-    #[serde(alias = "ttml")]
-    Ttml,
-
-    #[serde(alias = "srv1")]
-    Srv1,
-    #[serde(alias = "srv2")]
-    Srv2,
-    #[serde(alias = "srv3")]
-    Srv3,
-}
-
-#[derive(Debug, Deserialize, Serialize, PartialEq, PartialOrd)]
-#[serde(deny_unknown_fields)]
-pub struct Caption {
-    pub ext: SubtitleExt,
-    pub filepath: Option<PathBuf>,
-    pub filesize: Option<u64>,
-    pub fragments: Option<Vec<SubtitleFragment>>,
-    pub fragment_base_url: Option<Url>,
-    pub manifest_url: Option<Url>,
-    pub name: Option<String>,
-    pub protocol: Option<String>,
-    pub url: String,
-    pub video_id: Option<String>,
-}
-
-#[derive(Debug, Deserialize, Serialize, PartialEq, PartialOrd)]
-#[serde(deny_unknown_fields)]
-pub struct SubtitleFragment {
-    path: PathBuf,
-    duration: Option<f64>,
-}
-
-#[derive(Debug, Deserialize, Serialize, PartialEq, PartialOrd)]
-#[serde(deny_unknown_fields)]
-pub struct Chapter {
-    pub end_time: f64,
-    pub start_time: f64,
-    pub title: String,
-}
-
-#[derive(Debug, Deserialize, Serialize, PartialEq)]
-#[serde(deny_unknown_fields)]
-pub struct SponsorblockChapter {
-    /// This is an utterly useless field, and should thus be ignored
-    pub _categories: Option<Vec<Vec<Value>>>,
-
-    pub categories: Option<Vec<SponsorblockChapterCategory>>,
-    pub category: Option<SponsorblockChapterCategory>,
-    pub category_names: Option<Vec<String>>,
-    pub end_time: f64,
-    pub name: Option<String>,
-    pub r#type: Option<SponsorblockChapterType>,
-    pub start_time: f64,
-    pub title: String,
-}
-
-pub fn get_none<'de, D, T>(_: D) -> Result<Option<T>, D::Error>
-where
-    D: Deserializer<'de>,
-{
-    Ok(None)
-}
-
-#[derive(Debug, Deserialize, Serialize, PartialEq, PartialOrd, Ord, Eq, Clone, Copy)]
-#[serde(deny_unknown_fields)]
-pub enum SponsorblockChapterType {
-    #[serde(alias = "skip")]
-    Skip,
-
-    #[serde(alias = "chapter")]
-    Chapter,
-
-    #[serde(alias = "poi")]
-    Poi,
-}
-#[derive(Debug, Deserialize, Serialize, PartialEq, PartialOrd, Ord, Eq, Clone, Copy)]
-#[serde(deny_unknown_fields)]
-pub enum SponsorblockChapterCategory {
-    #[serde(alias = "filler")]
-    Filler,
-
-    #[serde(alias = "interaction")]
-    Interaction,
-
-    #[serde(alias = "music_offtopic")]
-    MusicOfftopic,
-
-    #[serde(alias = "poi_highlight")]
-    PoiHighlight,
-
-    #[serde(alias = "preview")]
-    Preview,
-
-    #[serde(alias = "sponsor")]
-    Sponsor,
-
-    #[serde(alias = "selfpromo")]
-    SelfPromo,
-
-    #[serde(alias = "chapter")]
-    Chapter,
-
-    #[serde(alias = "intro")]
-    Intro,
-
-    #[serde(alias = "outro")]
-    Outro,
-}
-
-#[derive(Debug, Deserialize, Serialize, PartialEq, PartialOrd)]
-#[serde(deny_unknown_fields)]
-#[allow(missing_copy_implementations)]
-pub struct HeatMapEntry {
-    pub start_time: f64,
-    pub end_time: f64,
-    pub value: f64,
-}
-
-#[derive(Debug, Deserialize, Serialize, PartialEq, PartialOrd, Ord, Eq, Clone, Copy)]
-#[serde(deny_unknown_fields)]
-pub enum InfoType {
-    #[serde(alias = "playlist")]
-    #[serde(rename(serialize = "playlist"))]
-    Playlist,
-
-    #[serde(alias = "url")]
-    #[serde(rename(serialize = "url"))]
-    Url,
-
-    #[serde(alias = "video")]
-    #[serde(rename(serialize = "video"))]
-    Video,
-}
-
-#[derive(Debug, Deserialize, Serialize, Eq, PartialEq, PartialOrd, Ord)]
-#[serde(deny_unknown_fields)]
-pub struct Version {
-    pub current_git_head: Option<String>,
-    pub release_git_head: String,
-    pub repository: String,
-    pub version: String,
-}
-
-#[derive(Debug, Deserialize, Serialize, Clone, Eq, PartialEq, PartialOrd, Ord)]
-#[serde(from = "String")]
-#[serde(deny_unknown_fields)]
-pub enum Parent {
-    Root,
-    Id(String),
-}
-
-impl Parent {
-    #[must_use]
-    pub fn id(&self) -> Option<&str> {
-        if let Self::Id(id) = self {
-            Some(id)
-        } else {
-            None
-        }
-    }
-}
-
-impl From<String> for Parent {
-    fn from(value: String) -> Self {
-        if value == "root" {
-            Self::Root
-        } else {
-            Self::Id(value)
-        }
-    }
-}
-
-#[derive(Debug, Deserialize, Serialize, Clone, Eq, PartialEq, PartialOrd, Ord)]
-#[serde(from = "String")]
-#[serde(deny_unknown_fields)]
-pub struct Id {
-    pub id: String,
-}
-impl From<String> for Id {
-    fn from(value: String) -> Self {
-        Self {
-            // Take the last element if the string is split with dots, otherwise take the full id
-            id: value.split('.').last().unwrap_or(&value).to_owned(),
-        }
-    }
-}
-
-#[derive(Debug, Deserialize, Serialize, Clone, Eq, PartialEq, PartialOrd, Ord)]
-#[serde(deny_unknown_fields)]
-#[allow(clippy::struct_excessive_bools)]
-pub struct Comment {
-    pub id: Id,
-    pub text: String,
-    #[serde(default = "zero")]
-    pub like_count: u32,
-    pub is_pinned: bool,
-    pub author_id: String,
-    #[serde(default = "unknown")]
-    pub author: String,
-    pub author_is_verified: bool,
-    pub author_thumbnail: Url,
-    pub parent: Parent,
-    #[serde(deserialize_with = "edited_from_time_text", alias = "_time_text")]
-    pub edited: bool,
-    // Can't also be deserialized, as it's already used in 'edited'
-    // _time_text: String,
-    pub timestamp: i64,
-    pub author_url: Option<Url>,
-    pub author_is_uploader: bool,
-    pub is_favorited: bool,
-}
-fn unknown() -> String {
-    "<Unknown>".to_string()
-}
-fn zero() -> u32 {
-    0
-}
-fn edited_from_time_text<'de, D>(d: D) -> Result<bool, D::Error>
-where
-    D: Deserializer<'de>,
-{
-    let s = String::deserialize(d)?;
-    if s.contains(" (edited)") {
-        Ok(true)
-    } else {
-        Ok(false)
-    }
-}
-
-#[derive(Debug, Deserialize, Serialize, PartialEq, Eq, PartialOrd, Ord)]
-#[serde(deny_unknown_fields)]
-pub struct ThumbNail {
-    pub id: Option<String>,
-    pub preference: Option<i32>,
-    /// in the form of "[`height`]x[`width`]"
-    pub resolution: Option<String>,
-    pub url: Url,
-    pub width: Option<u32>,
-    pub height: Option<u32>,
-}
-
-#[derive(Debug, Deserialize, Serialize, PartialEq, PartialOrd)]
-#[serde(deny_unknown_fields)]
-pub struct Format {
-    pub __needs_testing: Option<bool>,
-    pub __working: Option<bool>,
-    pub abr: Option<f64>,
-    pub acodec: Option<String>,
-    pub aspect_ratio: Option<f64>,
-    pub asr: Option<f64>,
-    pub audio_channels: Option<u32>,
-    pub audio_ext: Option<String>,
-    pub columns: Option<u32>,
-    pub container: Option<String>,
-    pub downloader_options: Option<DownloaderOptions>,
-    pub dynamic_range: Option<String>,
-    pub ext: String,
-    pub filepath: Option<PathBuf>,
-    pub filesize: Option<u64>,
-    pub filesize_approx: Option<u64>,
-    pub format: Option<String>,
-    pub format_id: String,
-    pub format_index: Option<String>,
-    pub format_note: Option<String>,
-    pub fps: Option<f64>,
-    pub fragment_base_url: Option<Todo>,
-    pub fragments: Option<Vec<Fragment>>,
-    pub has_drm: Option<bool>,
-    pub height: Option<u32>,
-    pub http_headers: Option<HttpHeader>,
-    pub is_dash_periods: Option<bool>,
-    pub is_live: Option<bool>,
-    pub language: Option<String>,
-    pub language_preference: Option<i32>,
-    pub manifest_stream_number: Option<u32>,
-    pub manifest_url: Option<Url>,
-    pub preference: Option<i32>,
-    pub protocol: Option<String>,
-    pub quality: Option<f64>,
-    pub resolution: Option<String>,
-    pub rows: Option<u32>,
-    pub source_preference: Option<i32>,
-    pub tbr: Option<f64>,
-    pub url: Url,
-    pub vbr: Option<f64>,
-    pub vcodec: String,
-    pub video_ext: Option<String>,
-    pub width: Option<u32>,
-}
-
-#[derive(Debug, Deserialize, Serialize, Eq, PartialEq, PartialOrd, Ord)]
-#[serde(deny_unknown_fields)]
-#[allow(missing_copy_implementations)]
-pub struct DownloaderOptions {
-    http_chunk_size: u64,
-}
-
-#[derive(Debug, Deserialize, Serialize, Eq, PartialEq, PartialOrd, Ord)]
-#[serde(deny_unknown_fields)]
-pub struct HttpHeader {
-    #[serde(alias = "User-Agent")]
-    pub user_agent: Option<String>,
-
-    #[serde(alias = "Accept")]
-    pub accept: Option<String>,
-
-    #[serde(alias = "X-Forwarded-For")]
-    pub x_forwarded_for: Option<String>,
-
-    #[serde(alias = "Accept-Language")]
-    pub accept_language: Option<String>,
-
-    #[serde(alias = "Sec-Fetch-Mode")]
-    pub sec_fetch_mode: Option<String>,
-}
-
-#[derive(Debug, Deserialize, Serialize, PartialEq, PartialOrd)]
-#[serde(deny_unknown_fields)]
-pub struct Fragment {
-    pub duration: Option<f64>,
-    pub fragment_count: Option<usize>,
-    pub path: Option<PathBuf>,
-    pub url: Option<Url>,
-}
-
-impl InfoJson {
-    pub fn to_py_dict(self, py: Python<'_>) -> PyResult<Bound<'_, PyDict>> {
-        let output: Bound<'_, PyDict> = json_loads_str(py, self)?;
-        Ok(output)
-    }
-}
diff --git a/crates/yt_dlp/src/wrapper/mod.rs b/crates/yt_dlp/src/wrapper/mod.rs
deleted file mode 100644
index 3fe3247..0000000
--- a/crates/yt_dlp/src/wrapper/mod.rs
+++ /dev/null
@@ -1,12 +0,0 @@
-// yt - A fully featured command line YouTube client
-//
-// Copyright (C) 2024 Benedikt Peetz <benedikt.peetz@b-peetz.de>
-// SPDX-License-Identifier: GPL-3.0-or-later
-//
-// This file is part of Yt.
-//
-// You should have received a copy of the License along with this program.
-// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
-
-pub mod info_json;
-// pub mod yt_dlp_options;
diff --git a/crates/yt_dlp/src/wrapper/yt_dlp_options.rs b/crates/yt_dlp/src/wrapper/yt_dlp_options.rs
deleted file mode 100644
index 25595b5..0000000
--- a/crates/yt_dlp/src/wrapper/yt_dlp_options.rs
+++ /dev/null
@@ -1,62 +0,0 @@
-// yt - A fully featured command line YouTube client
-//
-// Copyright (C) 2024 Benedikt Peetz <benedikt.peetz@b-peetz.de>
-// SPDX-License-Identifier: GPL-3.0-or-later
-//
-// This file is part of Yt.
-//
-// You should have received a copy of the License along with this program.
-// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
-
-use pyo3::{Bound, PyResult, Python, types::PyDict};
-use serde::Serialize;
-
-use crate::json_loads;
-
-#[derive(Serialize, Clone)]
-pub struct YtDlpOptions {
-    pub playliststart: u32,
-    pub playlistend: u32,
-    pub noplaylist: bool,
-    pub extract_flat: ExtractFlat,
-    // pub extractor_args: ExtractorArgs,
-    // pub format: String,
-    // pub fragment_retries: u32,
-    // #[serde(rename(serialize = "getcomments"))]
-    // pub get_comments: bool,
-    // #[serde(rename(serialize = "ignoreerrors"))]
-    // pub ignore_errors: bool,
-    // pub retries: u32,
-    // #[serde(rename(serialize = "writeinfojson"))]
-    // pub write_info_json: bool,
-    // pub postprocessors: Vec<serde_json::Map<String, serde_json::Value>>,
-}
-
-#[derive(Serialize, Copy, Clone)]
-pub enum ExtractFlat {
-    #[serde(rename(serialize = "in_playlist"))]
-    InPlaylist,
-
-    #[serde(rename(serialize = "discard_in_playlist"))]
-    DiscardInPlaylist,
-}
-
-#[derive(Serialize, Clone)]
-pub struct ExtractorArgs {
-    pub youtube: YoutubeExtractorArgs,
-}
-
-#[derive(Serialize, Clone)]
-pub struct YoutubeExtractorArgs {
-    comment_sort: Vec<String>,
-    max_comments: Vec<String>,
-}
-
-impl YtDlpOptions {
-    pub fn to_py_dict(self, py: Python) -> PyResult<Bound<PyDict>> {
-        let string = serde_json::to_string(&self).expect("This should always work");
-
-        let output: Bound<PyDict> = json_loads(py, string)?;
-        Ok(output)
-    }
-}