diff --git a/tests/data/test_image.png b/tests/data/test_image.png
new file mode 100644
index 0000000..324cd81
Binary files /dev/null and b/tests/data/test_image.png differ
diff --git a/tests/data/test_image_ref.png b/tests/data/test_image_ref.png
new file mode 100644
index 0000000..e28a4f6
Binary files /dev/null and b/tests/data/test_image_ref.png differ
diff --git a/tests/test_upscaler.py b/tests/test_upscaler.py
new file mode 100644
index 0000000..699acd4
--- /dev/null
+++ b/tests/test_upscaler.py
@@ -0,0 +1,55 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+
+import ctypes
+import multiprocessing
+import os
+from pathlib import Path
+
+import utils
+from PIL import Image
+
+from video2x import Upscaler, Video2X
+
+
+def test_upscaling():
+    video2x = Video2X()
+    output_path = Path("data/test_video_output.mp4")
+    video2x.upscale(
+        Path("data/test_video.mp4"),
+        output_path,
+        None,
+        720,
+        3,
+        5,
+        0,
+        "waifu2x",
+    )
+    output_path.unlink()
+
+
+def test_upscale_image():
+
+    # initialize upscaler instance
+    processing_queue = multiprocessing.Queue(maxsize=30)
+    processed_frames = multiprocessing.Manager().list([None])
+    pause = multiprocessing.Value(ctypes.c_bool, False)
+    upscaler = Upscaler(processing_queue, processed_frames, pause)
+
+    image = Image.open("data/test_image.png")
+    upscaled_image = upscaler.upscale_image(image, 1680, 960, "waifu2x", 3)
+
+    reference_image = Image.open("data/test_image_ref.png")
+    assert utils.get_image_diff(upscaled_image, reference_image) < 0.5
+
+
+def test_get_scaling_tasks():
+    dimensions = [320, 240, 3840, 2160]
+
+    for algorithm, correct_answer in [
+        ("waifu2x", [2, 2, 2, 2]),
+        ("srmd", [3, 4]),
+        ("realsr", [4, 4]),
+        ("realcugan", [3, 4]),
+    ]:
+        assert Upscaler._get_scaling_tasks(*dimensions, algorithm) == correct_answer
diff --git a/tests/utils.py b/tests/utils.py
new file mode 100644
index 0000000..88c289c
--- /dev/null
+++ b/tests/utils.py
@@ -0,0 +1,18 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+
+from PIL import Image, ImageChops, ImageStat
+
+
+def get_image_diff(image0: Image.Image, image1: Image.Image) -> float:
+    """
+    calculate the percentage difference between two images
+
+    :param image0 Image.Image: the first image
+    :param image1 Image.Image: the second image
+    :rtype float: the percent difference between the two images
+    """
+    difference = ImageChops.difference(image0, image1)
+    difference_stat = ImageStat.Stat(difference)
+    percent_diff = sum(difference_stat.mean) / (len(difference_stat.mean) * 255) * 100
+    return percent_diff
diff --git a/video2x/decoder.py b/video2x/decoder.py
index 809de73..c0eb47a 100755
--- a/video2x/decoder.py
+++ b/video2x/decoder.py
@@ -92,7 +92,7 @@ class VideoDecoder(threading.Thread):
                 ),
                 overwrite_output=True,
             ),
-            env={"AV_LOG_FORCE_COLOR": "TRUE"},
+            env={**os.environ, "AV_LOG_FORCE_COLOR": "TRUE"},
             stdin=subprocess.DEVNULL,
             stdout=subprocess.PIPE,
             stderr=subprocess.PIPE,
diff --git a/video2x/encoder.py b/video2x/encoder.py
index 6b53c7f..bba4319 100755
--- a/video2x/encoder.py
+++ b/video2x/encoder.py
@@ -129,7 +129,7 @@ class VideoEncoder(threading.Thread):
                 ),
                 overwrite_output=True,
             ),
-            env={"AV_LOG_FORCE_COLOR": "TRUE"},
+            env={**os.environ, "AV_LOG_FORCE_COLOR": "TRUE"},
             stdin=subprocess.PIPE,
             stderr=subprocess.PIPE,
         )
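For context on the `< 0.5` assertion in test_upscale_image: get_image_diff returns the mean absolute per-channel difference between the two images, normalized to a 0-100 percent scale, so the test tolerates less than 0.5% average deviation from the reference image. A minimal sketch of how the metric behaves, using synthetic solid-color images that are illustrative only and not part of the test data:

    from PIL import Image

    from utils import get_image_diff  # assumes this runs from the tests/ directory

    # two 4x4 RGB images whose red channel differs by 3 out of 255
    image_a = Image.new("RGB", (4, 4), (100, 100, 100))
    image_b = Image.new("RGB", (4, 4), (103, 100, 100))

    # per-channel mean differences are [3.0, 0.0, 0.0]
    # percent difference = (3.0 + 0.0 + 0.0) / (3 * 255) * 100 ~= 0.39, under the 0.5 threshold
    print(get_image_diff(image_a, image_b))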