mirror of
https://github.com/k4yt3x/video2x.git
synced 2026-02-13 08:34:44 +08:00
feat(rife): add support for frame interpolation and RIFE (#1244)
* feat: add RIFE files and processor/interpolator abstractions * feat: add `rife` as processor option * feat: add frame interpolation math except first frame * feat: complete motion interpolation and add scene detection * feat: improve Vulkan device validation * fix: fix casting issues and variable names * refactor: improve error-checking; add abstractions and factories * refactor: improve readability of the frames processor * docs: update changelog Signed-off-by: k4yt3x <i@k4yt3x.com>
This commit is contained in:
@@ -1,5 +1,7 @@
|
||||
#include "avutils.h"
|
||||
|
||||
#include <cstdint>
#include <cstdlib>
|
||||
|
||||
extern "C" {
|
||||
#include <libavcodec/avcodec.h>
|
||||
#include <libavutil/pixdesc.h>
|
||||
@@ -7,6 +9,25 @@ extern "C" {
|
||||
|
||||
#include <spdlog/spdlog.h>
|
||||
|
||||
#include "conversions.h"
|
||||
|
||||
// Determine the frame rate of the given video stream.
//
// Probes several sources in decreasing order of reliability:
// 'avg_frame_rate', 'r_frame_rate', av_guess_frame_rate(), and finally the
// stream's time base. A candidate is rejected when either its numerator or
// denominator is non-positive; the previous check only fell through when
// BOTH were zero, which let invalid rationals such as {5, 0} escape and
// later produce inf/NaN in av_q2d().
//
// @param ifmt_ctx       input format context that owns the stream
// @param in_vstream_idx index of the video stream to inspect
// @return the best frame rate found; may still be invalid if every source
//         failed, in which case a warning is logged
AVRational get_video_frame_rate(AVFormatContext *ifmt_ctx, int in_vstream_idx) {
    AVStream *stream = ifmt_ctx->streams[in_vstream_idx];

    // True when the rational cannot be used as a frame rate
    auto is_invalid = [](const AVRational &r) { return r.num <= 0 || r.den <= 0; };

    AVRational frame_rate = stream->avg_frame_rate;
    if (is_invalid(frame_rate)) {
        frame_rate = stream->r_frame_rate;
    }
    if (is_invalid(frame_rate)) {
        frame_rate = av_guess_frame_rate(ifmt_ctx, stream, nullptr);
    }
    if (is_invalid(frame_rate)) {
        // Last resort: the time base is not a frame rate, but it is better
        // than nothing for callers that only need an approximate value.
        frame_rate = stream->time_base;
    }
    if (is_invalid(frame_rate)) {
        spdlog::warn("Unable to determine the video's frame rate");
    }
    return frame_rate;
}
|
||||
|
||||
int64_t get_video_frame_count(AVFormatContext *ifmt_ctx, int in_vstream_idx) {
|
||||
// Use the 'nb_frames' field if it is available
|
||||
int64_t nb_frames = ifmt_ctx->streams[in_vstream_idx]->nb_frames;
|
||||
@@ -31,19 +52,7 @@ int64_t get_video_frame_count(AVFormatContext *ifmt_ctx, int in_vstream_idx) {
|
||||
spdlog::debug("Video duration: {}s", duration_secs);
|
||||
|
||||
// Calculate average FPS
|
||||
double fps = av_q2d(ifmt_ctx->streams[in_vstream_idx]->avg_frame_rate);
|
||||
if (fps <= 0) {
|
||||
spdlog::debug("Unable to read the average frame rate from 'avg_frame_rate'");
|
||||
fps = av_q2d(ifmt_ctx->streams[in_vstream_idx]->r_frame_rate);
|
||||
}
|
||||
if (fps <= 0) {
|
||||
spdlog::debug("Unable to read the average frame rate from 'r_frame_rate'");
|
||||
fps = av_q2d(av_guess_frame_rate(ifmt_ctx, ifmt_ctx->streams[in_vstream_idx], nullptr));
|
||||
}
|
||||
if (fps <= 0) {
|
||||
spdlog::debug("Unable to estimate the average frame rate with 'av_guess_frame_rate'");
|
||||
fps = av_q2d(ifmt_ctx->streams[in_vstream_idx]->time_base);
|
||||
}
|
||||
double fps = av_q2d(get_video_frame_rate(ifmt_ctx, in_vstream_idx));
|
||||
if (fps <= 0) {
|
||||
spdlog::warn("Unable to estimate the video's average frame rate");
|
||||
return -1;
|
||||
@@ -122,3 +131,58 @@ get_encoder_default_pix_fmt(const AVCodec *encoder, AVPixelFormat target_pix_fmt
|
||||
|
||||
return best_pix_fmt;
|
||||
}
|
||||
|
||||
// Compute the average per-pixel difference between two frames as a percentage.
//
// Both frames are converted to RGB24 and compared byte by byte; the result is
// the mean absolute difference over all RGB bytes, normalized to [0, 100].
//
// @param frame1 first frame (must be non-null)
// @param frame2 second frame (must be non-null, same dimensions as frame1)
// @return difference percentage in [0.0, 100.0], or -1.0f on error
float get_frame_diff(AVFrame *frame1, AVFrame *frame2) {
    if (!frame1 || !frame2) {
        spdlog::error("Invalid frame(s) provided for comparison");
        return -1.0f;
    }

    if (frame1->width != frame2->width || frame1->height != frame2->height) {
        spdlog::error("Frame dimensions do not match");
        return -1.0f;
    }

    int width = frame1->width;
    int height = frame1->height;

    // Degenerate case: zero-area frames are trivially identical. The previous
    // implementation divided by a zero 'max_diff' here and returned NaN.
    if (width <= 0 || height <= 0) {
        return 0.0f;
    }

    // Convert both frames to a common pixel format so their planes can be
    // compared byte-for-byte
    AVPixelFormat target_pix_fmt = AV_PIX_FMT_RGB24;
    AVFrame *rgb_frame1 = convert_avframe_pix_fmt(frame1, target_pix_fmt);
    AVFrame *rgb_frame2 = convert_avframe_pix_fmt(frame2, target_pix_fmt);

    if (!rgb_frame1 || !rgb_frame2) {
        spdlog::error("Failed to convert frames to target pixel format");
        // av_frame_free() is a no-op when the pointed-to frame is null, so no
        // per-pointer null checks are needed before cleanup
        av_frame_free(&rgb_frame1);
        av_frame_free(&rgb_frame2);
        return -1.0f;
    }

    // Accumulate the absolute difference over every RGB byte, honoring each
    // frame's own linesize (rows may be padded)
    uint64_t sum_diff = 0;
    for (int y = 0; y < height; y++) {
        const uint8_t *ptr1 = rgb_frame1->data[0] + y * rgb_frame1->linesize[0];
        const uint8_t *ptr2 = rgb_frame2->data[0] + y * rgb_frame2->linesize[0];
        for (int x = 0; x < width * 3; x++) {
            sum_diff += static_cast<uint64_t>(
                std::abs(static_cast<int>(ptr1[x]) - static_cast<int>(ptr2[x]))
            );
        }
    }

    // Clean up
    av_frame_free(&rgb_frame1);
    av_frame_free(&rgb_frame2);

    // Maximum possible difference: 255 per byte, 3 bytes (R, G, B) per pixel.
    // Computed once instead of incrementing a counter inside the hot loop.
    uint64_t max_diff =
        255ULL * 3ULL * static_cast<uint64_t>(width) * static_cast<uint64_t>(height);

    // Normalize to a percentage of the maximum possible difference
    return (static_cast<float>(sum_diff) / static_cast<float>(max_diff)) * 100.0f;
}
|
||||
|
||||
Reference in New Issue
Block a user