From da111f197f106b08de825a6dabd7c7f71db18bb6 Mon Sep 17 00:00:00 2001 From: Mika Cohen Date: Sat, 6 Dec 2025 10:30:43 -0700 Subject: [PATCH 01/11] Improve analyzer error handling and database retry logic - Change Pipeline.process_single_video to return {path, reason} error tuples for detailed error messages - Add retry_state_transition helper with exponential backoff (10 retries, 50-500ms delays) for database lock handling - Skip state transitions if video already in target/advanced state to prevent invalid transition errors - Fix .iex.exs to use list_configs() and remove non-existent start/pause functions - Extract mark_video_as_analyzed helper to satisfy Credo nesting depth requirements --- .iex.exs | 9 +-- lib/reencodarr/analyzer/broadway.ex | 120 ++++++++++++++++++++++------ 2 files changed, 98 insertions(+), 31 deletions(-) diff --git a/.iex.exs b/.iex.exs index 4b246567..9f16547b 100644 --- a/.iex.exs +++ b/.iex.exs @@ -49,17 +49,13 @@ defmodule IExHelpers do @doc "Start all pipelines" def start_all do - Analyzer.start() CrfSearcher.start() - Encoder.start() pipelines_status() end @doc "Pause all pipelines" def pause_all do - Analyzer.pause() CrfSearcher.pause() - Encoder.pause() pipelines_status() end @@ -71,7 +67,7 @@ defmodule IExHelpers do @doc "Get service configs" def configs do - Services.list_services() + Services.list_configs() end @doc "Quick video state summary" @@ -91,8 +87,7 @@ defmodule IExHelpers do %{ video: video, vmaf_count: length(video.vmafs), - chosen_vmaf: Enum.find(video.vmafs, & &1.chosen), - service: Services.get_service_by_id(video.service_id) + chosen_vmaf: Enum.find(video.vmafs, & &1.chosen) } end end diff --git a/lib/reencodarr/analyzer/broadway.ex b/lib/reencodarr/analyzer/broadway.ex index 0c3adb39..c7c2297f 100644 --- a/lib/reencodarr/analyzer/broadway.ex +++ b/lib/reencodarr/analyzer/broadway.ex @@ -595,47 +595,79 @@ defmodule Reencodarr.Analyzer.Broadway do # Applies the processing decision by updating video state in the database defp apply_processing_decision(video, {:skip_encoding, reason}) do - Logger.debug("Video #{video.path} #{reason}, marking as encoded (skipping all processing)") - - case Media.mark_as_encoded(video) do - {:ok, updated_video} -> - Logger.debug( - "Successfully marked as encoded: #{video.path}, video_id: #{updated_video.id}, state: #{updated_video.state}" - ) - - {:ok, updated_video} + # Check if video is already encoded - if so, no need to transition + if video.state == :encoded do + Logger.debug( + "Video #{video.path} already in encoded state, skipping transition (reason: #{reason})" + ) - {:error, changeset_error} -> - Logger.error("Failed to mark as encoded for #{video.path}: #{inspect(changeset_error)}") + {:ok, video} + else + Logger.debug("Video #{video.path} #{reason}, marking as encoded (skipping all processing)") - {:ok, video} - end - end + result = + retry_state_transition( + fn -> Media.mark_as_encoded(video) end, + video.path + ) - defp apply_processing_decision(video, {:needs_encoding, _reason}) do - # Validate video has required fields before marking as analyzed - if has_required_mediainfo_fields?(video) do - case Media.mark_as_analyzed(video) do + case result do {:ok, updated_video} -> Logger.debug( - "Successfully marked as analyzed: #{video.path}, video_id: #{updated_video.id}, state: #{updated_video.state}" + "Successfully marked as encoded: #{video.path}, video_id: #{updated_video.id}, state: #{updated_video.state}" ) {:ok, updated_video} - {:error, changeset_error} -> + {:error, error} -> Logger.error( 
- "Failed to mark as analyzed for #{video.path}: #{inspect(changeset_error)}" + "Failed to mark as encoded for #{video.path}: #{inspect(error)}" ) {:ok, video} end - else - Logger.error( - "Cannot mark video #{video.path} as analyzed - missing required fields (bitrate: #{video.bitrate}, width: #{video.width}, height: #{video.height})" + end + end + + defp apply_processing_decision(video, {:needs_encoding, _reason}) do + # Skip if video is already past analysis stage + if video.state in [:analyzed, :crf_searching, :crf_searched, :encoding, :encoded] do + Logger.debug( + "Video #{video.path} already in state #{video.state}, skipping analysis transition" ) {:ok, video} + else + # Validate video has required fields before marking as analyzed + if has_required_mediainfo_fields?(video) do + result = + retry_state_transition( + fn -> Media.mark_as_analyzed(video) end, + video.path + ) + + case result do + {:ok, updated_video} -> + Logger.debug( + "Successfully marked as analyzed: #{video.path}, video_id: #{updated_video.id}, state: #{updated_video.state}" + ) + + {:ok, updated_video} + + {:error, error} -> + Logger.error( + "Failed to mark as analyzed for #{video.path}: #{inspect(error)}" + ) + + {:ok, video} + end + else + Logger.error( + "Cannot mark video #{video.path} as analyzed - missing required fields (bitrate: #{video.bitrate}, width: #{video.width}, height: #{video.height})" + ) + + {:ok, video} + end end end @@ -754,4 +786,44 @@ defmodule Reencodarr.Analyzer.Broadway do # Return empty list to indicate failure - calling code should handle this [] end + + # Retry state transition with exponential backoff for database busy errors + defp retry_state_transition(fun, video_path, max_retries \\ 10) do + retry_state_transition(fun, video_path, max_retries, 0) + end + + defp retry_state_transition(fun, video_path, max_retries, attempt) + when attempt < max_retries do + fun.() + rescue + error in [Exqlite.Error] -> + case error.message do + "Database busy" -> + # Exponential backoff with max cap at 5 seconds + base_wait = (:math.pow(2, attempt) * 50) |> round() + wait_time = min(base_wait, 5_000) + + Logger.debug( + "Database busy updating #{video_path} on attempt #{attempt + 1}/#{max_retries}, retrying in #{wait_time}ms" + ) + + Process.sleep(wait_time) + retry_state_transition(fun, video_path, max_retries, attempt + 1) + + _ -> + {:error, error} + end + + other_error -> + {:error, other_error} + end + + defp retry_state_transition(_fun, video_path, max_retries, attempt) + when attempt >= max_retries do + Logger.error( + "Failed to update state for #{video_path} after #{max_retries} attempts due to database busy" + ) + + {:error, :database_busy} + end end From 5bd58a0ec8b90d7d473650e00dc46355f652d64b Mon Sep 17 00:00:00 2001 From: Mika Cohen Date: Tue, 9 Dec 2025 17:42:10 -0700 Subject: [PATCH 02/11] Support MP4 output format when input is MP4 - Add output_extension/1 helper to determine output format based on input - MP4 inputs now encode to MP4, all others default to MKV - Update regex patterns to match both .mkv and .mp4 extensions - Update progress parser to handle dynamic extensions - All tests passing (877 tests, 0 failures) --- lib/reencodarr/ab_av1/encode.ex | 17 +++++++- lib/reencodarr/ab_av1/output_parser.ex | 2 +- lib/reencodarr/ab_av1/progress_parser.ex | 6 +-- lib/reencodarr/analyzer/broadway.ex | 54 ++++++++++++------------ 4 files changed, 46 insertions(+), 33 deletions(-) diff --git a/lib/reencodarr/ab_av1/encode.ex b/lib/reencodarr/ab_av1/encode.ex index 34a4b91e..842267af 
100644 --- a/lib/reencodarr/ab_av1/encode.ex +++ b/lib/reencodarr/ab_av1/encode.ex @@ -272,12 +272,23 @@ defmodule Reencodarr.AbAv1.Encode do end # Private Helper Functions + + # Determine output extension based on input file + # MP4 files output as MP4, everything else outputs as MKV + defp output_extension(video_path) do + case Path.extname(video_path) |> String.downcase() do + ".mp4" -> ".mp4" + _ -> ".mkv" + end + end + defp prepare_encode_state(vmaf, state) do # Mark video as encoding BEFORE starting the port to prevent duplicate dispatches case Media.mark_as_encoding(vmaf.video) do {:ok, _updated_video} -> args = build_encode_args(vmaf) - output_file = Path.join(Helper.temp_dir(), "#{vmaf.video.id}.mkv") + ext = output_extension(vmaf.video.path) + output_file = Path.join(Helper.temp_dir(), "#{vmaf.video.id}#{ext}") port = Helper.open_port(args) @@ -309,12 +320,14 @@ defmodule Reencodarr.AbAv1.Encode do end defp build_encode_args(vmaf) do + ext = output_extension(vmaf.video.path) + base_args = [ "encode", "--crf", to_string(vmaf.crf), "--output", - Path.join(Helper.temp_dir(), "#{vmaf.video.id}.mkv"), + Path.join(Helper.temp_dir(), "#{vmaf.video.id}#{ext}"), "--input", vmaf.video.path ] diff --git a/lib/reencodarr/ab_av1/output_parser.ex b/lib/reencodarr/ab_av1/output_parser.ex index 8862bb98..c7d70c45 100644 --- a/lib/reencodarr/ab_av1/output_parser.ex +++ b/lib/reencodarr/ab_av1/output_parser.ex @@ -35,7 +35,7 @@ defmodule Reencodarr.AbAv1.OutputParser do ~r/\[(?[^\]]+)\].*?(?\d+(?:\.\d+)?)%,\s(?\d+(?:\.\d+)?)\sfps?,\seta\s(?\d+)\s(?second|minute|hour|day|week|month|year)s?/, success: ~r/(?:\[.*\]\s)?crf\s(?\d+(?:\.\d+)?)\ssuccessful/, warning: ~r/^Warning:\s(?.*)/, - encoding_start: ~r/\[.*\] encoding (?\d+\.mkv)/, + encoding_start: ~r/\[.*\] encoding (?\d+\.(?:mkv|mp4))/, encoding_progress: ~r/\[.*\]\s*(?\d+)%,\s*(?[\d\.]+)\s*fps,\s*eta\s*(?\d+)\s*(?minutes|seconds|hours|days|weeks|months|years)/, encoding_progress_alt: diff --git a/lib/reencodarr/ab_av1/progress_parser.ex b/lib/reencodarr/ab_av1/progress_parser.ex index 521a9e19..44537bdb 100644 --- a/lib/reencodarr/ab_av1/progress_parser.ex +++ b/lib/reencodarr/ab_av1/progress_parser.ex @@ -57,7 +57,7 @@ defmodule Reencodarr.AbAv1.ProgressParser do case Process.get(:progress_parser_patterns) do nil -> patterns = %{ - encoding_start: ~r/\[.*\] encoding (?\d+\.mkv)/, + encoding_start: ~r/\[.*\] encoding (?\d+\.(?:mkv|mp4))/, # Main progress pattern with brackets: [timestamp] percent%, fps fps, eta time unit progress: ~r/\[(?[^\]]+)\].*?(?\d+(?:\.\d+)?)%,\s(?\d+(?:\.\d+)?)\sfps?,?\s?eta\s(?\d+)\s(?(?:second|minute|hour|day|week|month|year)s?)/, @@ -99,10 +99,10 @@ defmodule Reencodarr.AbAv1.ProgressParser do end defp handle_encoding_start(%{"filename" => filename_with_ext}, state) do - # Extract video ID from filename (e.g., "123.mkv" -> get original filename) + # Extract video ID from filename (e.g., "123.mkv" or "123.mp4" -> get original filename) video_id = filename_with_ext - |> Path.basename(".mkv") + |> Path.basename(Path.extname(filename_with_ext)) |> Parsers.parse_int(0) filename = get_filename_for_encoding_start(state, video_id, filename_with_ext) diff --git a/lib/reencodarr/analyzer/broadway.ex b/lib/reencodarr/analyzer/broadway.ex index c7c2297f..077066ce 100644 --- a/lib/reencodarr/analyzer/broadway.ex +++ b/lib/reencodarr/analyzer/broadway.ex @@ -620,9 +620,7 @@ defmodule Reencodarr.Analyzer.Broadway do {:ok, updated_video} {:error, error} -> - Logger.error( - "Failed to mark as encoded for #{video.path}: 
#{inspect(error)}" - ) + Logger.error("Failed to mark as encoded for #{video.path}: #{inspect(error)}") {:ok, video} end @@ -638,36 +636,38 @@ defmodule Reencodarr.Analyzer.Broadway do {:ok, video} else - # Validate video has required fields before marking as analyzed - if has_required_mediainfo_fields?(video) do - result = - retry_state_transition( - fn -> Media.mark_as_analyzed(video) end, - video.path - ) + mark_video_as_analyzed(video) + end + end - case result do - {:ok, updated_video} -> - Logger.debug( - "Successfully marked as analyzed: #{video.path}, video_id: #{updated_video.id}, state: #{updated_video.state}" - ) + defp mark_video_as_analyzed(video) do + # Validate video has required fields before marking as analyzed + if has_required_mediainfo_fields?(video) do + result = + retry_state_transition( + fn -> Media.mark_as_analyzed(video) end, + video.path + ) - {:ok, updated_video} + case result do + {:ok, updated_video} -> + Logger.debug( + "Successfully marked as analyzed: #{video.path}, video_id: #{updated_video.id}, state: #{updated_video.state}" + ) - {:error, error} -> - Logger.error( - "Failed to mark as analyzed for #{video.path}: #{inspect(error)}" - ) + {:ok, updated_video} - {:ok, video} - end - else - Logger.error( - "Cannot mark video #{video.path} as analyzed - missing required fields (bitrate: #{video.bitrate}, width: #{video.width}, height: #{video.height})" - ) + {:error, error} -> + Logger.error("Failed to mark as analyzed for #{video.path}: #{inspect(error)}") - {:ok, video} + {:ok, video} end + else + Logger.error( + "Cannot mark video #{video.path} as analyzed - missing required fields (bitrate: #{video.bitrate}, width: #{video.width}, height: #{video.height})" + ) + + {:ok, video} end end From bd95ceeb26376edfa3217897bbb0bc0886f179cd Mon Sep 17 00:00:00 2001 From: Mika Cohen Date: Wed, 10 Dec 2025 11:31:00 -0700 Subject: [PATCH 03/11] Fix double mediainfo extraction causing MP4 analysis failures The batch mediainfo processor was extracting the 'media' key twice: 1. Once in process_videos_with_mediainfo (line 157) 2. Again in process_video_with_mediainfo (line 179-184) This caused the validate_mediainfo check to fail because it was looking for a 'track' key in already-extracted media data, resulting in 'invalid video info' errors for MP4 files. Fixed by removing the premature extraction on line 157 and letting process_video_with_mediainfo handle the structure properly. 
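Illustration of the failure mode (a minimal sketch; the map literal below assumes the
usual mediainfo JSON shape described above, with "media" wrapping a "track" list):

    # Simplified shape of one entry in mediainfo_map
    result = %{"media" => %{"track" => [%{"@type" => "General"}]}}

    # Premature extraction (the removed code) strips the "media" wrapper
    extracted = Map.get(result, "media", :no_mediainfo)
    #=> %{"track" => [...]}

    # process_video_with_mediainfo then extracts the "media" key again,
    # so the second lookup finds nothing and validation reports an error
    Map.get(extracted, "media", :no_mediainfo)
    #=> :no_mediainfo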
--- lib/reencodarr/analyzer/processing/pipeline.ex | 9 ++------- 1 file changed, 2 insertions(+), 7 deletions(-) diff --git a/lib/reencodarr/analyzer/processing/pipeline.ex b/lib/reencodarr/analyzer/processing/pipeline.ex index c677679c..bfdd91b5 100644 --- a/lib/reencodarr/analyzer/processing/pipeline.ex +++ b/lib/reencodarr/analyzer/processing/pipeline.ex @@ -152,13 +152,8 @@ defmodule Reencodarr.Analyzer.Processing.Pipeline do video_infos |> Task.async_stream( fn video_info -> - # Extract the "media" portion from the MediaInfo result - mediainfo = - case Map.get(mediainfo_map, video_info.path, :no_mediainfo) do - :no_mediainfo -> :no_mediainfo - result when is_map(result) -> Map.get(result, "media", :no_mediainfo) - _ -> :no_mediainfo - end + # Get the full MediaInfo result (already includes "media" key) + mediainfo = Map.get(mediainfo_map, video_info.path, :no_mediainfo) process_video_with_mediainfo(video_info, mediainfo) end, From a5103382fe8d11d3d55d71e1ba6abdacc2fea7da Mon Sep 17 00:00:00 2001 From: Mika Cohen Date: Wed, 10 Dec 2025 15:24:11 -0700 Subject: [PATCH 04/11] Remove MP4 file skip restrictions Now that MP4 encoding output format is properly supported, remove the temporary skip logic that was blocking MP4 files from: - File validation in analyzer pipeline - Encode queue processing MP4 files will now be analyzed and encoded like any other video format. --- lib/reencodarr/ab_av1.ex | 29 ------------------- .../analyzer/core/file_operations.ex | 13 +++------ 2 files changed, 4 insertions(+), 38 deletions(-) diff --git a/lib/reencodarr/ab_av1.ex b/lib/reencodarr/ab_av1.ex index 88660b60..d5e69e8d 100644 --- a/lib/reencodarr/ab_av1.ex +++ b/lib/reencodarr/ab_av1.ex @@ -68,35 +68,6 @@ defmodule Reencodarr.AbAv1 do """ @spec encode(Media.Vmaf.t()) :: :ok | {:error, atom()} def encode(vmaf) do - # Skip MP4 files - compatibility issues to be resolved later - video_id = Map.get(vmaf, :video_id) || Map.get(vmaf, "video_id") - - if is_integer(video_id) do - try do - video = Media.get_video!(video_id) - - if is_binary(video.path) and String.ends_with?(video.path, ".mp4") do - # Skip MP4 files - compatibility issues - Logger.info("Skipping encode for MP4 file (compatibility issues): #{video.path}") - # Mark as failed to skip future encoding attempts - case Media.mark_as_failed(video) do - {:ok, _updated} -> :ok - error -> error - end - else - do_queue_encode(vmaf) - end - rescue - Ecto.NoResultsError -> - # Video doesn't exist - fall back to normal validation/queuing - do_queue_encode(vmaf) - end - else - do_queue_encode(vmaf) - end - end - - defp do_queue_encode(vmaf) do case QueueManager.validate_encode_request(vmaf) do {:ok, validated_vmaf} -> message = QueueManager.build_encode_message(validated_vmaf) diff --git a/lib/reencodarr/analyzer/core/file_operations.ex b/lib/reencodarr/analyzer/core/file_operations.ex index cbc5a8b9..62e197ab 100644 --- a/lib/reencodarr/analyzer/core/file_operations.ex +++ b/lib/reencodarr/analyzer/core/file_operations.ex @@ -114,15 +114,10 @@ defmodule Reencodarr.Analyzer.Core.FileOperations do end defp validate_file_accessibility(path, %{exists: true}) do - # Skip MP4 files - compatibility issues to be resolved later - if String.ends_with?(path, ".mp4") do - {:error, "MP4 files skipped (compatibility issues)"} - else - # Additional checks can be added here (permissions, file type, etc.) 
- case File.stat(path, [:read]) do - {:ok, _file_stat} -> :ok - {:error, reason} -> {:error, "file not accessible: #{path} (#{reason})"} - end + # Additional checks can be added here (permissions, file type, etc.) + case File.stat(path, [:read]) do + {:ok, _file_stat} -> :ok + {:error, reason} -> {:error, "file not accessible: #{path} (#{reason})"} end end From d0d7803d67c7bc6cc05bb637afeb652b081ecc33 Mon Sep 17 00:00:00 2001 From: Mika Cohen Date: Wed, 10 Dec 2025 15:27:15 -0700 Subject: [PATCH 05/11] Improve error messages and fix test warnings - Replace generic 'invalid video info' with actual validation error reasons - Refactor filter_valid_videos to preserve error details for each file - Fix unused variable warnings in media_test.exs --- .../analyzer/processing/pipeline.ex | 34 ++++++++++++------- test/reencodarr/media_test.exs | 4 +-- 2 files changed, 24 insertions(+), 14 deletions(-) diff --git a/lib/reencodarr/analyzer/processing/pipeline.ex b/lib/reencodarr/analyzer/processing/pipeline.ex index bfdd91b5..b1195e7c 100644 --- a/lib/reencodarr/analyzer/processing/pipeline.ex +++ b/lib/reencodarr/analyzer/processing/pipeline.ex @@ -35,13 +35,13 @@ defmodule Reencodarr.Analyzer.Processing.Pipeline do Logger.debug("Processing batch of #{length(video_infos)} videos") # Pre-filter valid files for better performance - {valid_videos, invalid_videos} = filter_valid_videos(video_infos) + {valid_videos, invalid_videos_with_errors} = filter_valid_videos(video_infos) # Process valid videos with batch MediaInfo fetching case process_valid_videos(valid_videos, context) do {:ok, processed_videos} -> - # Combine results - all_results = processed_videos ++ mark_invalid_videos(invalid_videos) + # Combine results - invalid videos now include their actual error reasons + all_results = processed_videos ++ mark_invalid_videos(invalid_videos_with_errors) {:ok, all_results} error -> @@ -107,12 +107,22 @@ defmodule Reencodarr.Analyzer.Processing.Pipeline do paths = Enum.map(video_infos, & &1.path) validation_results = FileOperations.validate_files_for_processing(paths) - Enum.split_with(video_infos, fn video_info -> - case Map.get(validation_results, video_info.path) do - {:ok, _stats} -> true - _ -> false - end - end) + # Split into valid videos and invalid videos with their error reasons + {valid, invalid_with_errors} = + Enum.reduce(video_infos, {[], []}, fn video_info, {valid_acc, invalid_acc} -> + case Map.get(validation_results, video_info.path) do + {:ok, _stats} -> + {[video_info | valid_acc], invalid_acc} + + {:error, reason} -> + {valid_acc, [{video_info, reason} | invalid_acc]} + + nil -> + {valid_acc, [{video_info, "validation result not found"} | invalid_acc]} + end + end) + + {Enum.reverse(valid), Enum.reverse(invalid_with_errors)} end defp process_valid_videos([], _context), do: {:ok, []} @@ -198,9 +208,9 @@ defmodule Reencodarr.Analyzer.Processing.Pipeline do {:error, {video_info.path, error_msg}} end - defp mark_invalid_videos(invalid_videos) do - Enum.map(invalid_videos, fn video_info -> - {:error, {video_info.path, "invalid video info"}} + defp mark_invalid_videos(invalid_videos_with_errors) do + Enum.map(invalid_videos_with_errors, fn {video_info, reason} -> + {:error, {video_info.path, reason}} end) end diff --git a/test/reencodarr/media_test.exs b/test/reencodarr/media_test.exs index eab23791..f65c97d1 100644 --- a/test/reencodarr/media_test.exs +++ b/test/reencodarr/media_test.exs @@ -2316,7 +2316,7 @@ defmodule Reencodarr.MediaTest do state: :analyzed }) - result = 
Media.reset_videos_with_invalid_audio_metadata() + _result = Media.reset_videos_with_invalid_audio_metadata() # Atmos video should NOT be reset (atmos: true condition prevents it) updated = Repo.get(Reencodarr.Media.Video, video.id) @@ -2607,7 +2607,7 @@ defmodule Reencodarr.MediaTest do test "query_videos_ready_for_encoding/1 respects limit" do # Create multiple videos with chosen VMAFs - videos = + _videos = Enum.map(1..5, fn i -> {:ok, v} = Fixtures.video_fixture(%{state: :crf_searched}) From dfe19051805db0efb7b148bee282dfebd9d70a62 Mon Sep 17 00:00:00 2001 From: Mika Cohen Date: Wed, 10 Dec 2025 15:32:26 -0700 Subject: [PATCH 06/11] Auto-cleanup missing and empty files during analysis When file validation detects issues, handle them immediately: - Missing files: Delete the video record from the database - Empty files: Delete the empty file, delete the video record, and trigger a rescan in Sonarr/Radarr so they can re-download This cleanup happens early in the pipeline before any other processing, ensuring we don't waste time on files that can't be processed. --- lib/reencodarr/analyzer/broadway.ex | 12 +- .../analyzer/processing/pipeline.ex | 168 +++++++++++++++++- 2 files changed, 168 insertions(+), 12 deletions(-) diff --git a/lib/reencodarr/analyzer/broadway.ex b/lib/reencodarr/analyzer/broadway.ex index 077066ce..bf433c66 100644 --- a/lib/reencodarr/analyzer/broadway.ex +++ b/lib/reencodarr/analyzer/broadway.ex @@ -705,21 +705,11 @@ defmodule Reencodarr.Analyzer.Broadway do {:ok, video} -> # Record detailed failure information based on reason record_analysis_failure(video, reason) - Logger.debug("Successfully recorded analysis failure for video #{video.id}") :ok {:error, :not_found} -> - Logger.warning("Video not found in database, deleting orphan file: #{path}") - - case File.rm(path) do - :ok -> - Logger.info("Successfully deleted orphan file: #{path}") - - {:error, reason} -> - Logger.error("Failed to delete orphan file #{path}: #{inspect(reason)}") - end - + Logger.warning("Video not found in database for path: #{path}") :ok end end diff --git a/lib/reencodarr/analyzer/processing/pipeline.ex b/lib/reencodarr/analyzer/processing/pipeline.ex index b1195e7c..339565a3 100644 --- a/lib/reencodarr/analyzer/processing/pipeline.ex +++ b/lib/reencodarr/analyzer/processing/pipeline.ex @@ -10,11 +10,13 @@ defmodule Reencodarr.Analyzer.Processing.Pipeline do - MediaInfo integration and validation - Error handling and recovery strategies - Performance monitoring integration + - Automatic cleanup of missing/empty files """ require Logger alias Reencodarr.Analyzer.{Core.ConcurrencyManager, Core.FileOperations} alias Reencodarr.Analyzer.MediaInfo.CommandExecutor + alias Reencodarr.{Media, Services} alias Reencodarr.Media.MediaInfoExtractor # Type definitions for better type safety @@ -107,7 +109,7 @@ defmodule Reencodarr.Analyzer.Processing.Pipeline do paths = Enum.map(video_infos, & &1.path) validation_results = FileOperations.validate_files_for_processing(paths) - # Split into valid videos and invalid videos with their error reasons + # Split into valid videos and handle invalid ones immediately {valid, invalid_with_errors} = Enum.reduce(video_infos, {[], []}, fn video_info, {valid_acc, invalid_acc} -> case Map.get(validation_results, video_info.path) do @@ -115,6 +117,8 @@ defmodule Reencodarr.Analyzer.Processing.Pipeline do {[video_info | valid_acc], invalid_acc} {:error, reason} -> + # Handle cleanup immediately for missing/empty files + handle_invalid_file(video_info, reason) {valid_acc, 
[{video_info, reason} | invalid_acc]} nil -> @@ -125,6 +129,168 @@ defmodule Reencodarr.Analyzer.Processing.Pipeline do {Enum.reverse(valid), Enum.reverse(invalid_with_errors)} end + defp handle_invalid_file(video_info, reason) do + cond do + String.contains?(reason, "does not exist") -> + handle_missing_file(video_info) + + String.contains?(reason, "is empty") -> + handle_empty_file(video_info) + + true -> + # Other errors don't need special handling here + :ok + end + end + + defp handle_missing_file(video_info) do + Logger.info("File does not exist, deleting video record: #{video_info.path}") + + case Media.get_video(video_info.id) do + nil -> + Logger.debug("Video #{video_info.id} already deleted") + + video -> + case Media.delete_video_with_vmafs(video) do + {:ok, _} -> + Logger.info("Successfully deleted video record for missing file: #{video_info.path}") + + {:error, reason} -> + Logger.error( + "Failed to delete video record for #{video_info.path}: #{inspect(reason)}" + ) + end + end + end + + defp handle_empty_file(video_info) do + Logger.warning("File is empty, cleaning up: #{video_info.path}") + + # Delete the empty file + case File.rm(video_info.path) do + :ok -> + Logger.info("Successfully deleted empty file: #{video_info.path}") + + {:error, reason} -> + Logger.error("Failed to delete empty file #{video_info.path}: #{inspect(reason)}") + end + + # Get video record and trigger rescan before deleting + case Media.get_video(video_info.id) do + nil -> + Logger.debug("Video #{video_info.id} already deleted") + + video -> + # Trigger rescan in Sonarr/Radarr before deleting the record + trigger_service_rescan(video) + + # Delete the video record + case Media.delete_video_with_vmafs(video) do + {:ok, _} -> + Logger.info("Successfully deleted video record for empty file: #{video_info.path}") + + {:error, reason} -> + Logger.error( + "Failed to delete video record for #{video_info.path}: #{inspect(reason)}" + ) + end + end + end + + defp trigger_service_rescan(video) do + case video.service_type do + :sonarr -> + trigger_sonarr_rescan(video) + + :radarr -> + trigger_radarr_rescan(video) + + nil -> + Logger.debug("No service type for video #{video.id}, skipping rescan") + + other -> + Logger.warning("Unknown service type #{inspect(other)} for video #{video.id}") + end + end + + defp trigger_sonarr_rescan(video) do + case video.service_id do + nil -> + Logger.warning("No service_id for Sonarr video #{video.id}, cannot trigger rescan") + + service_id -> + case Integer.parse(service_id) do + {episode_file_id, ""} -> + do_sonarr_rescan(episode_file_id) + + _ -> + Logger.warning("Invalid service_id for Sonarr video: #{service_id}") + end + end + end + + defp do_sonarr_rescan(episode_file_id) do + case Services.Sonarr.get_episode_file(episode_file_id) do + {:ok, %{body: %{"seriesId" => series_id}}} when is_integer(series_id) -> + Logger.info("Triggering Sonarr rescan for series #{series_id}") + + case Services.Sonarr.refresh_series(series_id) do + {:ok, _} -> + Logger.info("Successfully triggered Sonarr rescan for series #{series_id}") + + {:error, reason} -> + Logger.error("Failed to trigger Sonarr rescan: #{inspect(reason)}") + end + + {:ok, response} -> + Logger.warning( + "Could not extract series ID from episode file response: #{inspect(response)}" + ) + + {:error, reason} -> + Logger.error("Failed to get episode file info from Sonarr: #{inspect(reason)}") + end + end + + defp trigger_radarr_rescan(video) do + case video.service_id do + nil -> + Logger.warning("No service_id for Radarr 
video #{video.id}, cannot trigger rescan") + + service_id -> + case Integer.parse(service_id) do + {movie_file_id, ""} -> + do_radarr_rescan(movie_file_id) + + _ -> + Logger.warning("Invalid service_id for Radarr video: #{service_id}") + end + end + end + + defp do_radarr_rescan(movie_file_id) do + case Services.Radarr.get_movie_file(movie_file_id) do + {:ok, %{body: %{"movieId" => movie_id}}} when is_integer(movie_id) -> + Logger.info("Triggering Radarr rescan for movie #{movie_id}") + + case Services.Radarr.refresh_movie(movie_id) do + {:ok, _} -> + Logger.info("Successfully triggered Radarr rescan for movie #{movie_id}") + + {:error, reason} -> + Logger.error("Failed to trigger Radarr rescan: #{inspect(reason)}") + end + + {:ok, response} -> + Logger.warning( + "Could not extract movie ID from movie file response: #{inspect(response)}" + ) + + {:error, reason} -> + Logger.error("Failed to get movie file info from Radarr: #{inspect(reason)}") + end + end + defp process_valid_videos([], _context), do: {:ok, []} @spec process_valid_videos([video_info()], processing_context()) :: processing_result() From 6653973ce85dba5dd09484ba01b23eed4279ceb1 Mon Sep 17 00:00:00 2001 From: Mika Cohen Date: Wed, 10 Dec 2025 15:35:36 -0700 Subject: [PATCH 07/11] Apply film grain to vintage HDR content Vintage films (pre-2009) that have been remastered to HDR still benefit from film grain synthesis to preserve the authentic film aesthetic. Previously, grain was only applied to SDR vintage content. Now it applies to both SDR and HDR content from before 2009. --- lib/reencodarr/rules.ex | 9 +++++---- test/reencodarr/rules_test.exs | 8 ++++---- 2 files changed, 9 insertions(+), 8 deletions(-) diff --git a/lib/reencodarr/rules.ex b/lib/reencodarr/rules.ex index 9ef26c38..63ae2f05 100644 --- a/lib/reencodarr/rules.ex +++ b/lib/reencodarr/rules.ex @@ -373,10 +373,11 @@ defmodule Reencodarr.Rules do For movies: uses release year For TV shows: uses series start year or episode air year - Only applies to non-HDR content as HDR typically doesn't need grain synthesis. + Applies to both SDR and HDR content - vintage films that have been remastered + to HDR still benefit from film grain synthesis to preserve the original look. 
""" @spec grain_for_vintage_content(Media.Video.t()) :: list() - def grain_for_vintage_content(%Media.Video{hdr: nil, content_year: year} = video) + def grain_for_vintage_content(%Media.Video{content_year: year} = video) when is_integer(year) and year < 2009 do strength = 8 @@ -387,7 +388,7 @@ defmodule Reencodarr.Rules do [{"--svt", "film-grain=#{strength}"}] end - def grain_for_vintage_content(%Media.Video{hdr: nil, path: path, title: title}) do + def grain_for_vintage_content(%Media.Video{path: path, title: title}) do strength = 8 # Fallback to filename parsing for non-API sourced videos full_text = "#{path} #{title || ""}" @@ -405,7 +406,7 @@ defmodule Reencodarr.Rules do end end - # Skip grain for HDR content or when no pattern detected + # Fallback when no year info available def grain_for_vintage_content(_), do: [] @doc """ diff --git a/test/reencodarr/rules_test.exs b/test/reencodarr/rules_test.exs index 4e591e0a..7d730fcb 100644 --- a/test/reencodarr/rules_test.exs +++ b/test/reencodarr/rules_test.exs @@ -622,7 +622,7 @@ defmodule Reencodarr.RulesTest do assert result == [] end - test "does not apply grain for HDR content even if vintage" do + test "applies grain for HDR content if vintage" do video = Fixtures.create_hdr_video(%{ path: "/movies/Blade Runner (2007)/movie.mkv", @@ -631,7 +631,7 @@ defmodule Reencodarr.RulesTest do result = Rules.grain_for_vintage_content(video) - assert result == [] + assert result == [{"--svt", "film-grain=8"}] end test "does not apply grain when no year pattern is found" do @@ -874,11 +874,11 @@ defmodule Reencodarr.RulesTest do assert result == [] end - test "skips grain for HDR content even if vintage" do + test "applies grain for HDR vintage content" do video = Fixtures.create_test_video(%{content_year: 2005, hdr: "HDR10"}) result = Rules.grain_for_vintage_content(video) - assert result == [] + assert result == [{"--svt", "film-grain=8"}] end test "skips grain when no year detected" do From 38c07a492b0a994daf23611b5a3ec9fa4424316b Mon Sep 17 00:00:00 2001 From: Mika Cohen Date: Thu, 11 Dec 2025 10:27:07 -0700 Subject: [PATCH 08/11] Fix service_id type mismatch in refresh_and_rename_from_video The service_id field on videos is stored as a string, but the Sonarr/Radarr API functions expect integers. This was causing the refresh and rename operations to fail silently after encoding completed. Now properly parses string service_id to integer before calling the API, with error handling for invalid IDs and missing service type/id. 
--- lib/reencodarr/sync.ex | 26 ++++++++++++++++++++++++-- 1 file changed, 24 insertions(+), 2 deletions(-) diff --git a/lib/reencodarr/sync.ex b/lib/reencodarr/sync.ex index 24a3e4ce..171dc113 100644 --- a/lib/reencodarr/sync.ex +++ b/lib/reencodarr/sync.ex @@ -370,12 +370,34 @@ defmodule Reencodarr.Sync do end end - def refresh_and_rename_from_video(%{service_type: :sonarr, service_id: id}), + def refresh_and_rename_from_video(%{service_type: :sonarr, service_id: id}) + when is_binary(id) do + case Integer.parse(id) do + {int_id, ""} -> refresh_operations(int_id, :sonarr) + _ -> {:error, "Invalid service_id: #{id}"} + end + end + + def refresh_and_rename_from_video(%{service_type: :sonarr, service_id: id}) when is_integer(id), do: refresh_operations(id, :sonarr) - def refresh_and_rename_from_video(%{service_type: :radarr, service_id: id}), + def refresh_and_rename_from_video(%{service_type: :radarr, service_id: id}) + when is_binary(id) do + case Integer.parse(id) do + {int_id, ""} -> refresh_operations(int_id, :radarr) + _ -> {:error, "Invalid service_id: #{id}"} + end + end + + def refresh_and_rename_from_video(%{service_type: :radarr, service_id: id}) when is_integer(id), do: refresh_operations(id, :radarr) + def refresh_and_rename_from_video(%{service_type: nil}), + do: {:error, "No service type for video"} + + def refresh_and_rename_from_video(%{service_id: nil}), + do: {:error, "No service_id for video"} + def rescan_and_rename_series(id), do: refresh_operations(id, :sonarr) # Helper function to validate series ID from episode file response From 0ff780b7ad48114b1238682ed4cb7adaaea94f35 Mon Sep 17 00:00:00 2001 From: Mika Cohen Date: Thu, 11 Dec 2025 10:35:49 -0700 Subject: [PATCH 09/11] Prevent performance monitor logging with empty analyzer queue When no videos have been processed recently, the performance monitor was still logging throughput statistics based on stale data within the 2-minute window. Now it checks for recent activity before attempting to adjust or log performance metrics, silently resetting the timer when the queue is idle. 
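The activity check is a simple cutoff comparison against the samples kept in
throughput_history; a minimal sketch (the time unit is assumed to match whatever
current_time uses in the monitor, and history entries are {timestamp, throughput}
tuples as in the code below):

    recent_cutoff = current_time - @adjustment_interval

    # Idle when no sample is newer than the cutoff; stale history is then
    # dropped and last_adjustment reset without logging anything.
    Enum.any?(history, fn {timestamp, _throughput} -> timestamp > recent_cutoff end)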
--- .../analyzer/broadway/performance_monitor.ex | 34 +++++++++++++------ 1 file changed, 24 insertions(+), 10 deletions(-) diff --git a/lib/reencodarr/analyzer/broadway/performance_monitor.ex b/lib/reencodarr/analyzer/broadway/performance_monitor.ex index b89c838f..4de176ae 100644 --- a/lib/reencodarr/analyzer/broadway/performance_monitor.ex +++ b/lib/reencodarr/analyzer/broadway/performance_monitor.ex @@ -277,18 +277,32 @@ defmodule Reencodarr.Analyzer.Broadway.PerformanceMonitor do time_since_last = current_time - state.last_adjustment # Only process if we have enough data and auto-tuning is enabled - if length(state.throughput_history) >= 3 and time_since_last >= @adjustment_interval do - avg_throughput = calculate_average_throughput(state.throughput_history) + cond do + length(state.throughput_history) < 3 or time_since_last < @adjustment_interval -> + state - # Only log and adjust if there's been recent activity (throughput > 0) - if avg_throughput > 0 do - adjust_based_on_throughput(state, avg_throughput, current_time) - else - # No recent activity, just reset the timer - %{state | last_adjustment: current_time} - end + not has_recent_activity?(state.throughput_history, current_time) -> + # No recent activity, clear stale history and reset timer silently + %{state | last_adjustment: current_time, throughput_history: []} + + true -> + maybe_adjust_with_throughput(state, current_time) + end + end + + defp has_recent_activity?(throughput_history, current_time) do + recent_cutoff = current_time - @adjustment_interval + Enum.any?(throughput_history, fn {timestamp, _} -> timestamp > recent_cutoff end) + end + + defp maybe_adjust_with_throughput(state, current_time) do + avg_throughput = calculate_average_throughput(state.throughput_history) + + if avg_throughput > 0 do + adjust_based_on_throughput(state, avg_throughput, current_time) else - state + # No meaningful throughput, just reset the timer + %{state | last_adjustment: current_time} end end From e81cfc112a75c51b4cea43800fe8dc0086b82534 Mon Sep 17 00:00:00 2001 From: Mika Cohen Date: Thu, 11 Dec 2025 10:52:06 -0700 Subject: [PATCH 10/11] Update dependencies to latest versions MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - credo: 1.7.13 → 1.7.14 - ecto_sql: 3.13.2 → 3.13.3 - phoenix: 1.8.1 → 1.8.3 - phoenix_live_reload: 1.6.1 → 1.6.2 - phoenix_live_view: 1.1.17 → 1.1.18 - swoosh: 1.19.8 → 1.19.9 - plug: 1.18.1 → 1.19.1 (transitive) Also fixes new credo warnings for using length/1 when checking for empty lists - replaced with Enum.empty? or pattern matching. 
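The length/1 cleanup reflects the usual Credo guidance: length/1 traverses the whole
list, so emptiness checks should use Enum.empty?/1 or a pattern match. A before/after
sketch with placeholder names (items, process, and handle are illustrative only):

    # Before: walks the entire list just to test emptiness
    if length(items) > 0, do: process(items)

    # After: constant-time emptiness check ...
    if not Enum.empty?(items), do: process(items)

    # ... or match the non-empty shape directly in the function head
    def handle([_ | _] = items), do: process(items)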
--- lib/reencodarr/analyzer/broadway.ex | 10 +++++----- .../analyzer/broadway/performance_monitor.ex | 10 ++++------ lib/reencodarr/analyzer/core/file_stat_cache.ex | 5 +++-- .../analyzer/media_info/command_executor.ex | 2 +- lib/reencodarr/analyzer/mediainfo_cache.ex | 7 ++++--- lib/reencodarr/analyzer/processing/pipeline.ex | 2 +- lib/reencodarr/failure_reporting.ex | 2 +- lib/reencodarr/failure_tracker.ex | 6 +++--- lib/reencodarr/media/video/media_info.ex | 6 +++--- lib/reencodarr/media/video_state_machine.ex | 8 ++++---- lib/reencodarr/rules.ex | 2 +- lib/reencodarr/sync.ex | 6 +++--- lib/reencodarr_web/live/failures_live.ex | 6 +++--- mix.exs | 12 ++++++------ mix.lock | 14 +++++++------- test/reencodarr/encoder/audio_args_test.exs | 6 +++--- .../failure_tracker_command_output_test.exs | 2 +- test/reencodarr/media_property_test.exs | 2 +- test/reencodarr/media_test.exs | 16 ++++++++-------- test/reencodarr/rules_test.exs | 4 ++-- test/reencodarr/sync_performance_test.exs | 2 +- 21 files changed, 65 insertions(+), 65 deletions(-) diff --git a/lib/reencodarr/analyzer/broadway.ex b/lib/reencodarr/analyzer/broadway.ex index bf433c66..c382e607 100644 --- a/lib/reencodarr/analyzer/broadway.ex +++ b/lib/reencodarr/analyzer/broadway.ex @@ -389,7 +389,11 @@ defmodule Reencodarr.Analyzer.Broadway do ) # Only proceed with upsert if we have successful videos - if length(successful_videos) > 0 do + if Enum.empty?(successful_videos) do + Logger.debug("Broadway: No successful videos to upsert") + # Still handle any failed paths + log_processing_summary([], failed_paths ++ additional_failed_paths) + else # Extract video attributes from successful videos video_attrs_list = Enum.map(successful_videos, fn {_video_info, attrs} -> attrs end) @@ -405,10 +409,6 @@ defmodule Reencodarr.Analyzer.Broadway do Logger.error("Broadway: perform_batch_upsert failed: #{inspect(reason)}") {:error, reason} end - else - Logger.debug("Broadway: No successful videos to upsert") - # Still handle any failed paths - log_processing_summary([], failed_paths ++ additional_failed_paths) end Logger.debug("Broadway: batch_upsert_and_transition_videos completed") diff --git a/lib/reencodarr/analyzer/broadway/performance_monitor.ex b/lib/reencodarr/analyzer/broadway/performance_monitor.ex index 4de176ae..40907a6f 100644 --- a/lib/reencodarr/analyzer/broadway/performance_monitor.ex +++ b/lib/reencodarr/analyzer/broadway/performance_monitor.ex @@ -369,13 +369,11 @@ defmodule Reencodarr.Analyzer.Broadway.PerformanceMonitor do Logger.warning("Failed to send context update to producer: #{inspect(error)}") end + defp calculate_average_throughput([]), do: 0 + defp calculate_average_throughput(history) do - if length(history) > 0 do - total = Enum.reduce(history, 0, fn {_time, throughput}, acc -> acc + throughput end) - total / length(history) - else - 0 - end + total = Enum.reduce(history, 0, fn {_time, throughput}, acc -> acc + throughput end) + total / length(history) end defp calculate_throughput(batch_size, duration_ms) when duration_ms > 0, diff --git a/lib/reencodarr/analyzer/core/file_stat_cache.ex b/lib/reencodarr/analyzer/core/file_stat_cache.ex index aa64f49b..35fc8f55 100644 --- a/lib/reencodarr/analyzer/core/file_stat_cache.ex +++ b/lib/reencodarr/analyzer/core/file_stat_cache.ex @@ -153,9 +153,10 @@ defmodule Reencodarr.Analyzer.Core.FileStatCache do end) new_timers = Map.drop(state.cache_timers, expired_keys) + expired_count = length(expired_keys) - if length(expired_keys) > 0 do - Logger.debug("FileStatCache: Cleaned up 
#{length(expired_keys)} expired entries") + if expired_count > 0 do + Logger.debug("FileStatCache: Cleaned up #{expired_count} expired entries") end {:noreply, %{state | cache: active_cache, cache_timers: new_timers}} diff --git a/lib/reencodarr/analyzer/media_info/command_executor.ex b/lib/reencodarr/analyzer/media_info/command_executor.ex index 1f6c1cbc..63a0a8fc 100644 --- a/lib/reencodarr/analyzer/media_info/command_executor.ex +++ b/lib/reencodarr/analyzer/media_info/command_executor.ex @@ -27,7 +27,7 @@ defmodule Reencodarr.Analyzer.MediaInfo.CommandExecutor do Automatically optimizes batch size and concurrency based on system capabilities. """ @spec execute_batch_mediainfo([String.t()]) :: {:ok, map()} | {:error, term()} - def execute_batch_mediainfo(paths) when is_list(paths) and length(paths) > 0 do + def execute_batch_mediainfo([_ | _] = paths) do Logger.debug("Executing MediaInfo for #{length(paths)} files") # Pre-filter existing files to avoid command errors diff --git a/lib/reencodarr/analyzer/mediainfo_cache.ex b/lib/reencodarr/analyzer/mediainfo_cache.ex index 07558ab7..18e4e5a5 100644 --- a/lib/reencodarr/analyzer/mediainfo_cache.ex +++ b/lib/reencodarr/analyzer/mediainfo_cache.ex @@ -492,10 +492,11 @@ defmodule Reencodarr.Analyzer.MediaInfoCache do # Remove expired entries from cache active_cache = Map.drop(state.cache, expired_paths) - new_cache_size = state.cache_size - length(expired_paths) + expired_count = length(expired_paths) + new_cache_size = state.cache_size - expired_count - if length(expired_paths) > 0 do - Logger.debug("MediaInfoCache: Cleaned up #{length(expired_paths)} expired entries") + if expired_count > 0 do + Logger.debug("MediaInfoCache: Cleaned up #{expired_count} expired entries") end # Enforce size limits by evicting LRU entries if needed diff --git a/lib/reencodarr/analyzer/processing/pipeline.ex b/lib/reencodarr/analyzer/processing/pipeline.ex index 339565a3..c18c249d 100644 --- a/lib/reencodarr/analyzer/processing/pipeline.ex +++ b/lib/reencodarr/analyzer/processing/pipeline.ex @@ -407,7 +407,7 @@ defmodule Reencodarr.Analyzer.Processing.Pipeline do # Record failures properly through the failure system if we have them # For now, log summary - ideally we'd have video structs to record individual failures - if length(failed) > 0 do + if not Enum.empty?(failed) do Logger.warning( "Batch processing completed with #{length(failed)} failures: #{inspect(failed)}" ) diff --git a/lib/reencodarr/failure_reporting.ex b/lib/reencodarr/failure_reporting.ex index 85d54b15..8e1f6bb3 100644 --- a/lib/reencodarr/failure_reporting.ex +++ b/lib/reencodarr/failure_reporting.ex @@ -289,7 +289,7 @@ defmodule Reencodarr.FailureReporting do IO.puts("") # By Stage - if length(report.by_stage) > 0 do + if not Enum.empty?(report.by_stage) do IO.puts(IO.ANSI.bright() <> "Failures by Stage:" <> IO.ANSI.reset()) Enum.each(report.by_stage, fn stage -> diff --git a/lib/reencodarr/failure_tracker.ex b/lib/reencodarr/failure_tracker.ex index ba22aae7..8b9732db 100644 --- a/lib/reencodarr/failure_tracker.ex +++ b/lib/reencodarr/failure_tracker.ex @@ -316,11 +316,11 @@ defmodule Reencodarr.FailureTracker do defp enhance_message_with_ffmpeg_details(output, base_message, ffmpeg_exit_code) do specific_errors = extract_specific_ffmpeg_errors(output) - if length(specific_errors) > 0 do + if Enum.empty?(specific_errors) do + "#{base_message} (FFmpeg exit #{ffmpeg_exit_code})" + else error_details = Enum.join(specific_errors, "; ") "#{base_message} (FFmpeg exit #{ffmpeg_exit_code}): 
#{error_details}" - else - "#{base_message} (FFmpeg exit #{ffmpeg_exit_code})" end end diff --git a/lib/reencodarr/media/video/media_info.ex b/lib/reencodarr/media/video/media_info.ex index 691d1ee7..d7dacbba 100644 --- a/lib/reencodarr/media/video/media_info.ex +++ b/lib/reencodarr/media/video/media_info.ex @@ -238,10 +238,10 @@ defmodule Reencodarr.Media.Video.MediaInfo do is_nil(track.channels) or track.channels == 0 end) - if length(invalid_audio_tracks) > 0 do - add_error(changeset, :audio_tracks, "audio tracks must have valid channel information") - else + if Enum.empty?(invalid_audio_tracks) do changeset + else + add_error(changeset, :audio_tracks, "audio tracks must have valid channel information") end end diff --git a/lib/reencodarr/media/video_state_machine.ex b/lib/reencodarr/media/video_state_machine.ex index e6bd3955..252529cc 100644 --- a/lib/reencodarr/media/video_state_machine.ex +++ b/lib/reencodarr/media/video_state_machine.ex @@ -215,12 +215,12 @@ defmodule Reencodarr.Media.VideoStateMachine do defp validate_codecs_present(changeset) do changeset |> validate_change(:video_codecs, fn :video_codecs, codecs -> - if is_list(codecs) and length(codecs) > 0, + if is_list(codecs) and not Enum.empty?(codecs), do: [], else: [video_codecs: "must have at least one codec"] end) |> validate_change(:audio_codecs, fn :audio_codecs, codecs -> - if is_list(codecs) and length(codecs) > 0, + if is_list(codecs) and not Enum.empty?(codecs), do: [], else: [audio_codecs: "must have at least one codec"] end) @@ -249,8 +249,8 @@ defmodule Reencodarr.Media.VideoStateMachine do end defp video_codecs_valid?(%Video{video_codecs: video_codecs, audio_codecs: audio_codecs}) do - is_list(video_codecs) and length(video_codecs) > 0 and - is_list(audio_codecs) and length(audio_codecs) > 0 + is_list(video_codecs) and not Enum.empty?(video_codecs) and + is_list(audio_codecs) and not Enum.empty?(audio_codecs) end defp has_vmaf_data?(%Video{} = _video) do diff --git a/lib/reencodarr/rules.ex b/lib/reencodarr/rules.ex index 63ae2f05..15589a03 100644 --- a/lib/reencodarr/rules.ex +++ b/lib/reencodarr/rules.ex @@ -264,7 +264,7 @@ defmodule Reencodarr.Rules do [] not (is_integer(channels) and channels > 0) or - not (is_list(audio_codecs) and length(audio_codecs) > 0) -> + not (is_list(audio_codecs) and not Enum.empty?(audio_codecs)) -> Logger.debug( "🔴 Invalid audio metadata for video #{video.id}: channels=#{inspect(channels)}, codecs=#{inspect(audio_codecs)}, path=#{video.path}" ) diff --git a/lib/reencodarr/sync.ex b/lib/reencodarr/sync.ex index 171dc113..ae975bbc 100644 --- a/lib/reencodarr/sync.ex +++ b/lib/reencodarr/sync.ex @@ -89,11 +89,11 @@ defmodule Reencodarr.Sync do # Process all files in a single batch operation files_processed = - if length(all_files) > 0 do + if Enum.empty?(all_files) do + 0 + else batch_upsert_videos(all_files, service_type) length(all_files) - else - 0 end # Log batch performance metrics diff --git a/lib/reencodarr_web/live/failures_live.ex b/lib/reencodarr_web/live/failures_live.ex index 522b446b..cddd7ab8 100644 --- a/lib/reencodarr_web/live/failures_live.ex +++ b/lib/reencodarr_web/live/failures_live.ex @@ -460,7 +460,7 @@ defmodule ReencodarrWeb.FailuresLive do <%= case Map.get(@video_failures, video.id) do %> - <% failures when is_list(failures) and length(failures) > 0 -> %> + <% failures when is_list(failures) and failures != [] -> %> <% latest_failure = List.first(failures) %>
@@ -525,7 +525,7 @@ defmodule ReencodarrWeb.FailuresLive do <%= case Map.get(@video_failures, video.id) do %> - <% failures when is_list(failures) and length(failures) > 0 -> %> + <% failures when is_list(failures) and failures != [] -> %>
All Failures ({length(failures)}) @@ -831,7 +831,7 @@ defmodule ReencodarrWeb.FailuresLive do end # Helper function to check if a query has GROUP BY clause - defp has_group_by?(%Ecto.Query{group_bys: group_bys}), do: length(group_bys) > 0 + defp has_group_by?(%Ecto.Query{group_bys: group_bys}), do: not Enum.empty?(group_bys) defp get_failures_by_video(videos) do video_ids = Enum.map(videos, & &1.id) diff --git a/mix.exs b/mix.exs index 83848a0d..8d381531 100644 --- a/mix.exs +++ b/mix.exs @@ -45,10 +45,10 @@ defmodule Reencodarr.MixProject do {:broadway_dashboard, "~> 0.4.0"}, {:car_req, tag: "0.3.1", github: "carsdotcom/car_req"}, {:contex, "~> 0.5.0"}, - {:credo, "~> 1.7", only: [:dev, :test], runtime: false}, + {:credo, "~> 1.7.14", only: [:dev, :test], runtime: false}, {:dialyxir, "~> 1.1", only: [:dev], runtime: false}, {:dns_cluster, "~> 0.2.0"}, - {:ecto_sql, "~> 3.10"}, + {:ecto_sql, "~> 3.13.3"}, {:esbuild, "~> 0.8", runtime: Mix.env() == :dev}, {:finch, "~> 0.13"}, {:floki, ">= 0.30.0", only: :test}, @@ -65,16 +65,16 @@ defmodule Reencodarr.MixProject do {:jason, "~> 1.2"}, {:logger_backends, "~> 1.0"}, {:logger_file_backend, "~> 0.0.13"}, - {:phoenix, "~> 1.8"}, + {:phoenix, "~> 1.8.3"}, {:phoenix_ecto, "~> 4.5"}, {:phoenix_html, "~> 4.1"}, {:phoenix_live_dashboard, "~> 0.8.3"}, - {:phoenix_live_reload, "~> 1.2", only: :dev}, - {:phoenix_live_view, "~> 1.1"}, + {:phoenix_live_reload, "~> 1.6.2", only: :dev}, + {:phoenix_live_view, "~> 1.1.18"}, {:ecto_sqlite3, "~> 0.17"}, {:req, "~> 0.5"}, {:stream_data, "~> 1.2.0", only: :test}, - {:swoosh, "~> 1.5"}, + {:swoosh, "~> 1.19.9"}, {:tailwind, "~> 0.2", runtime: Mix.env() == :dev}, {:telemetry_metrics, "~> 1.0"}, {:telemetry_poller, "~> 1.0"}, diff --git a/mix.lock b/mix.lock index ecb38e95..84390c98 100644 --- a/mix.lock +++ b/mix.lock @@ -9,13 +9,13 @@ "certifi": {:hex, :certifi, "2.15.0", "0e6e882fcdaaa0a5a9f2b3db55b1394dba07e8d6d9bcad08318fb604c6839712", [:rebar3], [], "hexpm", "b147ed22ce71d72eafdad94f055165c1c182f61a2ff49df28bcc71d1d5b94a60"}, "combine": {:hex, :combine, "0.10.0", "eff8224eeb56498a2af13011d142c5e7997a80c8f5b97c499f84c841032e429f", [:mix], [], "hexpm", "1b1dbc1790073076580d0d1d64e42eae2366583e7aecd455d1215b0d16f2451b"}, "contex": {:hex, :contex, "0.5.0", "5d8a6defbeb41f54adfcb0f85c4756d4f2b84aa5b0d809d45a5d2e90d91d0392", [:mix], [{:nimble_strftime, "~> 0.1.0", [hex: :nimble_strftime, repo: "hexpm", optional: false]}], "hexpm", "b7497a1790324d84247859df44ba4bcf2489d9bba1812a5375b2f2046b9e6fd7"}, - "credo": {:hex, :credo, "1.7.13", "126a0697df6b7b71cd18c81bc92335297839a806b6f62b61d417500d1070ff4e", [:mix], [{:bunt, "~> 0.2.1 or ~> 1.0", [hex: :bunt, repo: "hexpm", optional: false]}, {:file_system, "~> 0.2 or ~> 1.0", [hex: :file_system, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", "47641e6d2bbff1e241e87695b29f617f1a8f912adea34296fb10ecc3d7e9e84f"}, + "credo": {:hex, :credo, "1.7.14", "c7e75216cea8d978ba8c60ed9dede4cc79a1c99a266c34b3600dd2c33b96bc92", [:mix], [{:bunt, "~> 0.2.1 or ~> 1.0", [hex: :bunt, repo: "hexpm", optional: false]}, {:file_system, "~> 0.2 or ~> 1.0", [hex: :file_system, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", "12a97d6bb98c277e4fb1dff45aaf5c137287416009d214fb46e68147bd9e0203"}, "db_connection": {:hex, :db_connection, "2.8.1", "9abdc1e68c34c6163f6fb96a96532272d13ad7ca45262156ae8b7ec6d9dc4bec", [:mix], [{:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: 
"hexpm", optional: false]}], "hexpm", "a61a3d489b239d76f326e03b98794fb8e45168396c925ef25feb405ed09da8fd"}, "decimal": {:hex, :decimal, "2.3.0", "3ad6255aa77b4a3c4f818171b12d237500e63525c2fd056699967a3e7ea20f62", [:mix], [], "hexpm", "a4d66355cb29cb47c3cf30e71329e58361cfcb37c34235ef3bf1d7bf3773aeac"}, "dialyxir": {:hex, :dialyxir, "1.4.7", "dda948fcee52962e4b6c5b4b16b2d8fa7d50d8645bbae8b8685c3f9ecb7f5f4d", [:mix], [{:erlex, ">= 0.2.8", [hex: :erlex, repo: "hexpm", optional: false]}], "hexpm", "b34527202e6eb8cee198efec110996c25c5898f43a4094df157f8d28f27d9efe"}, "dns_cluster": {:hex, :dns_cluster, "0.2.0", "aa8eb46e3bd0326bd67b84790c561733b25c5ba2fe3c7e36f28e88f384ebcb33", [:mix], [], "hexpm", "ba6f1893411c69c01b9e8e8f772062535a4cf70f3f35bcc964a324078d8c8240"}, "ecto": {:hex, :ecto, "3.13.5", "9d4a69700183f33bf97208294768e561f5c7f1ecf417e0fa1006e4a91713a834", [:mix], [{:decimal, "~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "df9efebf70cf94142739ba357499661ef5dbb559ef902b68ea1f3c1fabce36de"}, - "ecto_sql": {:hex, :ecto_sql, "3.13.2", "a07d2461d84107b3d037097c822ffdd36ed69d1cf7c0f70e12a3d1decf04e2e1", [:mix], [{:db_connection, "~> 2.4.1 or ~> 2.5", [hex: :db_connection, repo: "hexpm", optional: false]}, {:ecto, "~> 3.13.0", [hex: :ecto, repo: "hexpm", optional: false]}, {:myxql, "~> 0.7", [hex: :myxql, repo: "hexpm", optional: true]}, {:postgrex, "~> 0.19 or ~> 1.0", [hex: :postgrex, repo: "hexpm", optional: true]}, {:tds, "~> 2.1.1 or ~> 2.2", [hex: :tds, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4.0 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "539274ab0ecf1a0078a6a72ef3465629e4d6018a3028095dc90f60a19c371717"}, + "ecto_sql": {:hex, :ecto_sql, "3.13.3", "81f7067dd1951081888529002dbc71f54e5e891b69c60195040ea44697e1104a", [:mix], [{:db_connection, "~> 2.4.1 or ~> 2.5", [hex: :db_connection, repo: "hexpm", optional: false]}, {:ecto, "~> 3.13.0", [hex: :ecto, repo: "hexpm", optional: false]}, {:myxql, "~> 0.7", [hex: :myxql, repo: "hexpm", optional: true]}, {:postgrex, "~> 0.19 or ~> 1.0", [hex: :postgrex, repo: "hexpm", optional: true]}, {:tds, "~> 2.1.1 or ~> 2.2", [hex: :tds, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4.0 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "5751caea36c8f5dd0d1de6f37eceffea19d10bd53f20e5bbe31c45f2efc8944a"}, "ecto_sqlite3": {:hex, :ecto_sqlite3, "0.22.0", "edab2d0f701b7dd05dcf7e2d97769c106aff62b5cfddc000d1dd6f46b9cbd8c3", [:mix], [{:decimal, "~> 1.6 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:ecto, "~> 3.13.0", [hex: :ecto, repo: "hexpm", optional: false]}, {:ecto_sql, "~> 3.13.0", [hex: :ecto_sql, repo: "hexpm", optional: false]}, {:exqlite, "~> 0.22", [hex: :exqlite, repo: "hexpm", optional: false]}], "hexpm", "5af9e031bffcc5da0b7bca90c271a7b1e7c04a93fecf7f6cd35bc1b1921a64bd"}, "elixir_make": {:hex, :elixir_make, "0.9.0", "6484b3cd8c0cee58f09f05ecaf1a140a8c97670671a6a0e7ab4dc326c3109726", [:mix], [], "hexpm", "db23d4fd8b757462ad02f8aa73431a426fe6671c80b200d9710caf3d1dd0ffdb"}, "erlex": {:hex, :erlex, "0.2.8", "cd8116f20f3c0afe376d1e8d1f0ae2452337729f68be016ea544a72f767d9c12", [:mix], [], "hexpm", "9d66ff9fedf69e49dc3fd12831e12a8a37b76f8651dd21cd45fcf5561a8a7590"}, @@ -47,22 +47,22 @@ "nimble_pool": {:hex, :nimble_pool, "1.1.0", "bf9c29fbdcba3564a8b800d1eeb5a3c58f36e1e11d7b7fb2e084a643f645f06b", 
[:mix], [], "hexpm", "af2e4e6b34197db81f7aad230c1118eac993acc0dae6bc83bac0126d4ae0813a"}, "nimble_strftime": {:hex, :nimble_strftime, "0.1.1", "b988184d1bd945bc139b2c27dd00a6c0774ec94f6b0b580083abd62d5d07818b", [:mix], [], "hexpm", "89e599c9b8b4d1203b7bb5c79eb51ef7c6a28fbc6228230b312f8b796310d755"}, "parse_trans": {:hex, :parse_trans, "3.4.1", "6e6aa8167cb44cc8f39441d05193be6e6f4e7c2946cb2759f015f8c56b76e5ff", [:rebar3], [], "hexpm", "620a406ce75dada827b82e453c19cf06776be266f5a67cff34e1ef2cbb60e49a"}, - "phoenix": {:hex, :phoenix, "1.8.1", "865473a60a979551a4879db79fbfb4503e41cd809e77c85af79716578b6a456d", [:mix], [{:bandit, "~> 1.0", [hex: :bandit, repo: "hexpm", optional: true]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:phoenix_pubsub, "~> 2.1", [hex: :phoenix_pubsub, repo: "hexpm", optional: false]}, {:phoenix_template, "~> 1.0", [hex: :phoenix_template, repo: "hexpm", optional: false]}, {:phoenix_view, "~> 2.0", [hex: :phoenix_view, repo: "hexpm", optional: true]}, {:plug, "~> 1.14", [hex: :plug, repo: "hexpm", optional: false]}, {:plug_cowboy, "~> 2.7", [hex: :plug_cowboy, repo: "hexpm", optional: true]}, {:plug_crypto, "~> 1.2 or ~> 2.0", [hex: :plug_crypto, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}, {:websock_adapter, "~> 0.5.3", [hex: :websock_adapter, repo: "hexpm", optional: false]}], "hexpm", "84d77d2b2e77c3c7e7527099bd01ef5c8560cd149c036d6b3a40745f11cd2fb2"}, + "phoenix": {:hex, :phoenix, "1.8.3", "49ac5e485083cb1495a905e47eb554277bdd9c65ccb4fc5100306b350151aa95", [:mix], [{:bandit, "~> 1.0", [hex: :bandit, repo: "hexpm", optional: true]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:phoenix_pubsub, "~> 2.1", [hex: :phoenix_pubsub, repo: "hexpm", optional: false]}, {:phoenix_template, "~> 1.0", [hex: :phoenix_template, repo: "hexpm", optional: false]}, {:phoenix_view, "~> 2.0", [hex: :phoenix_view, repo: "hexpm", optional: true]}, {:plug, "~> 1.14", [hex: :plug, repo: "hexpm", optional: false]}, {:plug_cowboy, "~> 2.7", [hex: :plug_cowboy, repo: "hexpm", optional: true]}, {:plug_crypto, "~> 1.2 or ~> 2.0", [hex: :plug_crypto, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}, {:websock_adapter, "~> 0.5.3", [hex: :websock_adapter, repo: "hexpm", optional: false]}], "hexpm", "36169f95cc2e155b78be93d9590acc3f462f1e5438db06e6248613f27c80caec"}, "phoenix_ecto": {:hex, :phoenix_ecto, "4.7.0", "75c4b9dfb3efdc42aec2bd5f8bccd978aca0651dbcbc7a3f362ea5d9d43153c6", [:mix], [{:ecto, "~> 3.5", [hex: :ecto, repo: "hexpm", optional: false]}, {:phoenix_html, "~> 2.14.2 or ~> 3.0 or ~> 4.1", [hex: :phoenix_html, repo: "hexpm", optional: true]}, {:plug, "~> 1.9", [hex: :plug, repo: "hexpm", optional: false]}, {:postgrex, "~> 0.16 or ~> 1.0", [hex: :postgrex, repo: "hexpm", optional: true]}], "hexpm", "1d75011e4254cb4ddf823e81823a9629559a1be93b4321a6a5f11a5306fbf4cc"}, "phoenix_html": {:hex, :phoenix_html, "4.3.0", "d3577a5df4b6954cd7890c84d955c470b5310bb49647f0a114a6eeecc850f7ad", [:mix], [], "hexpm", "3eaa290a78bab0f075f791a46a981bbe769d94bc776869f4f3063a14f30497ad"}, "phoenix_live_dashboard": {:hex, :phoenix_live_dashboard, "0.8.7", "405880012cb4b706f26dd1c6349125bfc903fb9e44d1ea668adaf4e04d4884b7", [:mix], [{:ecto, "~> 3.6.2 or ~> 3.7", [hex: :ecto, repo: "hexpm", optional: true]}, {:ecto_mysql_extras, "~> 0.5", [hex: :ecto_mysql_extras, repo: "hexpm", optional: true]}, {:ecto_psql_extras, "~> 0.7", 
[hex: :ecto_psql_extras, repo: "hexpm", optional: true]}, {:ecto_sqlite3_extras, "~> 1.1.7 or ~> 1.2.0", [hex: :ecto_sqlite3_extras, repo: "hexpm", optional: true]}, {:mime, "~> 1.6 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:phoenix_live_view, "~> 0.19 or ~> 1.0", [hex: :phoenix_live_view, repo: "hexpm", optional: false]}, {:telemetry_metrics, "~> 0.6 or ~> 1.0", [hex: :telemetry_metrics, repo: "hexpm", optional: false]}], "hexpm", "3a8625cab39ec261d48a13b7468dc619c0ede099601b084e343968309bd4d7d7"}, - "phoenix_live_reload": {:hex, :phoenix_live_reload, "1.6.1", "05df733a09887a005ed0d69a7fc619d376aea2730bf64ce52ac51ce716cc1ef0", [:mix], [{:file_system, "~> 0.2.10 or ~> 1.0", [hex: :file_system, repo: "hexpm", optional: false]}, {:phoenix, "~> 1.4", [hex: :phoenix, repo: "hexpm", optional: false]}], "hexpm", "74273843d5a6e4fef0bbc17599f33e3ec63f08e69215623a0cd91eea4288e5a0"}, - "phoenix_live_view": {:hex, :phoenix_live_view, "1.1.17", "1d782b5901cf13b137c6d8c56542ff6cb618359b2adca7e185b21df728fa0c6c", [:mix], [{:igniter, ">= 0.6.16 and < 1.0.0-0", [hex: :igniter, repo: "hexpm", optional: true]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:lazy_html, "~> 0.1.0", [hex: :lazy_html, repo: "hexpm", optional: true]}, {:phoenix, "~> 1.6.15 or ~> 1.7.0 or ~> 1.8.0-rc", [hex: :phoenix, repo: "hexpm", optional: false]}, {:phoenix_html, "~> 3.3 or ~> 4.0", [hex: :phoenix_html, repo: "hexpm", optional: false]}, {:phoenix_template, "~> 1.0", [hex: :phoenix_template, repo: "hexpm", optional: false]}, {:phoenix_view, "~> 2.0", [hex: :phoenix_view, repo: "hexpm", optional: true]}, {:plug, "~> 1.15", [hex: :plug, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4.2 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "fa82307dd9305657a8236d6b48e60ef2e8d9f742ee7ed832de4b8bcb7e0e5ed2"}, + "phoenix_live_reload": {:hex, :phoenix_live_reload, "1.6.2", "b18b0773a1ba77f28c52decbb0f10fd1ac4d3ae5b8632399bbf6986e3b665f62", [:mix], [{:file_system, "~> 0.2.10 or ~> 1.0", [hex: :file_system, repo: "hexpm", optional: false]}, {:phoenix, "~> 1.4", [hex: :phoenix, repo: "hexpm", optional: false]}], "hexpm", "d1f89c18114c50d394721365ffb428cce24f1c13de0467ffa773e2ff4a30d5b9"}, + "phoenix_live_view": {:hex, :phoenix_live_view, "1.1.18", "b5410017b3d4edf261d9c98ebc334e0637d7189457c730720cfc13e206443d43", [:mix], [{:igniter, ">= 0.6.16 and < 1.0.0-0", [hex: :igniter, repo: "hexpm", optional: true]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:lazy_html, "~> 0.1.0", [hex: :lazy_html, repo: "hexpm", optional: true]}, {:phoenix, "~> 1.6.15 or ~> 1.7.0 or ~> 1.8.0-rc", [hex: :phoenix, repo: "hexpm", optional: false]}, {:phoenix_html, "~> 3.3 or ~> 4.0", [hex: :phoenix_html, repo: "hexpm", optional: false]}, {:phoenix_template, "~> 1.0", [hex: :phoenix_template, repo: "hexpm", optional: false]}, {:phoenix_view, "~> 2.0", [hex: :phoenix_view, repo: "hexpm", optional: true]}, {:plug, "~> 1.15", [hex: :plug, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4.2 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "f189b759595feff0420e9a1d544396397f9cf9e2d5a8cb98ba5b6cab01927da0"}, "phoenix_pubsub": {:hex, :phoenix_pubsub, "2.2.0", "ff3a5616e1bed6804de7773b92cbccfc0b0f473faf1f63d7daf1206c7aeaaa6f", [:mix], [], "hexpm", "adc313a5bf7136039f63cfd9668fde73bba0765e0614cba80c06ac9460ff3e96"}, "phoenix_template": {:hex, :phoenix_template, "1.0.4", "e2092c132f3b5e5b2d49c96695342eb36d0ed514c5b252a77048d5969330d639", [:mix], 
[{:phoenix_html, "~> 2.14.2 or ~> 3.0 or ~> 4.0", [hex: :phoenix_html, repo: "hexpm", optional: true]}], "hexpm", "2c0c81f0e5c6753faf5cca2f229c9709919aba34fab866d3bc05060c9c444206"}, - "plug": {:hex, :plug, "1.18.1", "5067f26f7745b7e31bc3368bc1a2b818b9779faa959b49c934c17730efc911cf", [:mix], [{:mime, "~> 1.0 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:plug_crypto, "~> 1.1.1 or ~> 1.2 or ~> 2.0", [hex: :plug_crypto, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4.3 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "57a57db70df2b422b564437d2d33cf8d33cd16339c1edb190cd11b1a3a546cc2"}, + "plug": {:hex, :plug, "1.19.1", "09bac17ae7a001a68ae393658aa23c7e38782be5c5c00c80be82901262c394c0", [:mix], [{:mime, "~> 1.0 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:plug_crypto, "~> 1.1.1 or ~> 1.2 or ~> 2.0", [hex: :plug_crypto, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4.3 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "560a0017a8f6d5d30146916862aaf9300b7280063651dd7e532b8be168511e62"}, "plug_crypto": {:hex, :plug_crypto, "2.1.1", "19bda8184399cb24afa10be734f84a16ea0a2bc65054e23a62bb10f06bc89491", [:mix], [], "hexpm", "6470bce6ffe41c8bd497612ffde1a7e4af67f36a15eea5f921af71cf3e11247c"}, "postgrex": {:hex, :postgrex, "0.21.1", "2c5cc830ec11e7a0067dd4d623c049b3ef807e9507a424985b8dcf921224cd88", [:mix], [{:db_connection, "~> 2.1", [hex: :db_connection, repo: "hexpm", optional: false]}, {:decimal, "~> 1.5 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:table, "~> 0.1.0", [hex: :table, repo: "hexpm", optional: true]}], "hexpm", "27d8d21c103c3cc68851b533ff99eef353e6a0ff98dc444ea751de43eb48bdac"}, "req": {:hex, :req, "0.5.16", "99ba6a36b014458e52a8b9a0543bfa752cb0344b2a9d756651db1281d4ba4450", [:mix], [{:brotli, "~> 0.3.1", [hex: :brotli, repo: "hexpm", optional: true]}, {:ezstd, "~> 1.0", [hex: :ezstd, repo: "hexpm", optional: true]}, {:finch, "~> 0.17", [hex: :finch, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}, {:mime, "~> 2.0.6 or ~> 2.1", [hex: :mime, repo: "hexpm", optional: false]}, {:nimble_csv, "~> 1.0", [hex: :nimble_csv, repo: "hexpm", optional: true]}, {:plug, "~> 1.0", [hex: :plug, repo: "hexpm", optional: true]}], "hexpm", "974a7a27982b9b791df84e8f6687d21483795882a7840e8309abdbe08bb06f09"}, "req_fuse": {:hex, :req_fuse, "0.3.2", "8f96b26527deefe3d128496c058a23014754a569d12d281905d4c9e56bc3bae2", [:mix], [{:fuse, ">= 2.4.0", [hex: :fuse, repo: "hexpm", optional: false]}, {:req, ">= 0.4.14", [hex: :req, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "55cf642c03f10aed0dc4f97adc10f0985b355b377d2bc32bb0c569d82f3aa07e"}, "ssl_verify_fun": {:hex, :ssl_verify_fun, "1.1.7", "354c321cf377240c7b8716899e182ce4890c5938111a1296add3ec74cf1715df", [:make, :mix, :rebar3], [], "hexpm", "fe4c190e8f37401d30167c8c405eda19469f34577987c76dde613e838bbc67f8"}, "stream_data": {:hex, :stream_data, "1.2.0", "58dd3f9e88afe27dc38bef26fce0c84a9e7a96772b2925c7b32cd2435697a52b", [:mix], [], "hexpm", "eb5c546ee3466920314643edf68943a5b14b32d1da9fe01698dc92b73f89a9ed"}, - "swoosh": {:hex, :swoosh, "1.19.8", "0576f2ea96d1bb3a6e02cc9f79cbd7d497babc49a353eef8dce1a1f9f82d7915", [:mix], [{:bandit, ">= 1.0.0", [hex: :bandit, repo: "hexpm", optional: true]}, {:cowboy, "~> 1.1 or ~> 2.4", [hex: :cowboy, repo: "hexpm", 
optional: true]}, {:ex_aws, "~> 2.1", [hex: :ex_aws, repo: "hexpm", optional: true]}, {:finch, "~> 0.6", [hex: :finch, repo: "hexpm", optional: true]}, {:gen_smtp, "~> 0.13 or ~> 1.0", [hex: :gen_smtp, repo: "hexpm", optional: true]}, {:hackney, "~> 1.9", [hex: :hackney, repo: "hexpm", optional: true]}, {:idna, "~> 6.0", [hex: :idna, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}, {:mail, "~> 0.2", [hex: :mail, repo: "hexpm", optional: true]}, {:mime, "~> 1.1 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:mua, "~> 0.2.3", [hex: :mua, repo: "hexpm", optional: true]}, {:multipart, "~> 0.4", [hex: :multipart, repo: "hexpm", optional: true]}, {:plug, "~> 1.9", [hex: :plug, repo: "hexpm", optional: true]}, {:plug_cowboy, ">= 1.0.0", [hex: :plug_cowboy, repo: "hexpm", optional: true]}, {:req, "~> 0.5.10 or ~> 0.6 or ~> 1.0", [hex: :req, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4.2 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "d7503c2daf0f9899afd8eba9923eeddef4b62e70816e1d3b6766e4d6c60e94ad"}, + "swoosh": {:hex, :swoosh, "1.19.9", "4eb2c471b8cf06adbdcaa1d57a0ad53c0ed9348ce8586a06cc491f9f0dbcb553", [:mix], [{:bandit, ">= 1.0.0", [hex: :bandit, repo: "hexpm", optional: true]}, {:cowboy, "~> 1.1 or ~> 2.4", [hex: :cowboy, repo: "hexpm", optional: true]}, {:ex_aws, "~> 2.1", [hex: :ex_aws, repo: "hexpm", optional: true]}, {:finch, "~> 0.6", [hex: :finch, repo: "hexpm", optional: true]}, {:gen_smtp, "~> 0.13 or ~> 1.0", [hex: :gen_smtp, repo: "hexpm", optional: true]}, {:hackney, "~> 1.9", [hex: :hackney, repo: "hexpm", optional: true]}, {:idna, "~> 6.0", [hex: :idna, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}, {:mail, "~> 0.2", [hex: :mail, repo: "hexpm", optional: true]}, {:mime, "~> 1.1 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:mua, "~> 0.2.3", [hex: :mua, repo: "hexpm", optional: true]}, {:multipart, "~> 0.4", [hex: :multipart, repo: "hexpm", optional: true]}, {:plug, "~> 1.9", [hex: :plug, repo: "hexpm", optional: true]}, {:plug_cowboy, ">= 1.0.0", [hex: :plug_cowboy, repo: "hexpm", optional: true]}, {:req, "~> 0.5.10 or ~> 0.6 or ~> 1.0", [hex: :req, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4.2 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "516898263a64925c31723c56bc7999a26e97b04e869707f681f4c9bca7ee1688"}, "tailwind": {:hex, :tailwind, "0.4.1", "e7bcc222fe96a1e55f948e76d13dd84a1a7653fb051d2a167135db3b4b08d3e9", [:mix], [], "hexpm", "6249d4f9819052911120dbdbe9e532e6bd64ea23476056adb7f730aa25c220d1"}, "telemetry": {:hex, :telemetry, "1.3.0", "fedebbae410d715cf8e7062c96a1ef32ec22e764197f70cda73d82778d61e7a2", [:rebar3], [], "hexpm", "7015fc8919dbe63764f4b4b87a95b7c0996bd539e0d499be6ec9d7f3875b79e6"}, "telemetry_metrics": {:hex, :telemetry_metrics, "1.1.0", "5bd5f3b5637e0abea0426b947e3ce5dd304f8b3bc6617039e2b5a008adc02f8f", [:mix], [{:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "e7b79e8ddfde70adb6db8a6623d1778ec66401f366e9a8f5dd0955c56bc8ce67"}, diff --git a/test/reencodarr/encoder/audio_args_test.exs b/test/reencodarr/encoder/audio_args_test.exs index e900eb98..ae7927ef 100644 --- a/test/reencodarr/encoder/audio_args_test.exs +++ b/test/reencodarr/encoder/audio_args_test.exs @@ -113,7 +113,7 @@ defmodule Reencodarr.Encoder.AudioArgsTest do refute "--acodec" in args # Should still include video args - assert length(args) > 0 + 
      assert not Enum.empty?(args)
     end
   end
 
@@ -140,7 +140,7 @@ defmodule Reencodarr.Encoder.AudioArgsTest do
 
       # Should include arguments appropriate for stereo
       assert is_list(args)
-      assert length(args) > 0
+      assert not Enum.empty?(args)
     end
 
     test "handles Atmos audio correctly" do
@@ -165,7 +165,7 @@ defmodule Reencodarr.Encoder.AudioArgsTest do
 
       # Should handle Atmos appropriately
       assert is_list(args)
-      assert length(args) > 0
+      assert not Enum.empty?(args)
     end
   end
 end
diff --git a/test/reencodarr/failure_tracker_command_output_test.exs b/test/reencodarr/failure_tracker_command_output_test.exs
index 339e0dfc..765a7bda 100644
--- a/test/reencodarr/failure_tracker_command_output_test.exs
+++ b/test/reencodarr/failure_tracker_command_output_test.exs
@@ -228,7 +228,7 @@ defmodule Reencodarr.FailureTracker.CommandOutputTest do
       # Verify the tested_scores are stored as maps, not tuples
       assert is_list(failure.system_context["tested_scores"])
 
-      if length(failure.system_context["tested_scores"]) > 0 do
+      if not Enum.empty?(failure.system_context["tested_scores"]) do
         first_score = List.first(failure.system_context["tested_scores"])
         assert is_map(first_score)
         assert Map.has_key?(first_score, "crf")
diff --git a/test/reencodarr/media_property_test.exs b/test/reencodarr/media_property_test.exs
index 5e4a7571..bed6b690 100644
--- a/test/reencodarr/media_property_test.exs
+++ b/test/reencodarr/media_property_test.exs
@@ -237,7 +237,7 @@ defmodule Reencodarr.Media.PropertyTest do
     property "audio codec lists are non-empty" do
       check all(codecs <- audio_codecs_generator()) do
         assert is_list(codecs)
-        assert length(codecs) > 0
+        assert not Enum.empty?(codecs)
         assert length(codecs) <= 3
 
         Enum.each(codecs, fn codec ->
diff --git a/test/reencodarr/media_test.exs b/test/reencodarr/media_test.exs
index f65c97d1..18c16d37 100644
--- a/test/reencodarr/media_test.exs
+++ b/test/reencodarr/media_test.exs
@@ -782,7 +782,7 @@ defmodule Reencodarr.MediaTest do
       {:ok, _} = Media.mark_as_crf_searched(analyzed)
 
       videos = Media.list_videos_by_estimated_percent(10)
-      assert length(videos) >= 1
+      assert not Enum.empty?(videos)
     end
 
     test "delete_video_with_vmafs/1 deletes video and associated VMAFs" do
@@ -895,7 +895,7 @@ defmodule Reencodarr.MediaTest do
       )
 
       failures = Media.get_video_failures(video.id)
-      assert length(failures) >= 1
+      assert not Enum.empty?(failures)
       assert Enum.all?(failures, &(&1.resolved == false))
     end
 
@@ -929,7 +929,7 @@ defmodule Reencodarr.MediaTest do
       stats = Media.get_failure_statistics()
 
       assert is_list(stats)
-      assert length(stats) > 0
+      assert not Enum.empty?(stats)
     end
 
     test "get_common_failure_patterns/1 returns common patterns" do
@@ -975,7 +975,7 @@ defmodule Reencodarr.MediaTest do
 
       vmafs = Media.list_videos_by_estimated_percent(10)
 
-      assert length(vmafs) >= 1
+      assert not Enum.empty?(vmafs)
       assert Enum.any?(vmafs, fn v -> v.video_id == video.id end)
     end
 
@@ -986,7 +986,7 @@ defmodule Reencodarr.MediaTest do
       result = Media.get_next_for_encoding_by_time()
 
       assert is_list(result)
-      if length(result) > 0, do: assert(hd(result).chosen == true)
+      if not Enum.empty?(result), do: assert(hd(result).chosen == true)
     end
 
     test "debug_encoding_queue_by_library/1 returns queue debug info" do
@@ -1381,7 +1381,7 @@ defmodule Reencodarr.MediaTest do
 
       failures = Media.get_video_failures(video.id)
 
-      assert length(failures) >= 1
+      assert not Enum.empty?(failures)
       assert hd(failures).failure_stage == :encoding
     end
 
@@ -1438,7 +1438,7 @@ defmodule Reencodarr.MediaTest do
       results = Media.get_next_for_encoding(5)
 
       assert is_list(results)
-      assert length(results) >= 1
+      assert not Enum.empty?(results)
     end
 
     test "get_next_for_encoding/1 with no limit returns single result" do
@@ -1900,7 +1900,7 @@ defmodule Reencodarr.MediaTest do
       {:error, changeset} = Media.create_vmaf(%{})
 
       # Should have at least one required field error
-      assert length(changeset.errors) > 0
+      assert not Enum.empty?(changeset.errors)
 
       assert Keyword.has_key?(changeset.errors, :score) or
                Keyword.has_key?(changeset.errors, :crf) or
diff --git a/test/reencodarr/rules_test.exs b/test/reencodarr/rules_test.exs
index 7d730fcb..1771f8ce 100644
--- a/test/reencodarr/rules_test.exs
+++ b/test/reencodarr/rules_test.exs
@@ -475,7 +475,7 @@ defmodule Reencodarr.RulesTest do
       result = Rules.build_args(video, :encode, nil)
 
       assert is_list(result)
-      assert length(result) > 0
+      assert not Enum.empty?(result)
     end
 
     test "handles empty additional_params" do
@@ -483,7 +483,7 @@ defmodule Reencodarr.RulesTest do
       result = Rules.build_args(video, :encode, [])
 
       assert is_list(result)
-      assert length(result) > 0
+      assert not Enum.empty?(result)
     end
 
     test "handles malformed additional_params gracefully" do
diff --git a/test/reencodarr/sync_performance_test.exs b/test/reencodarr/sync_performance_test.exs
index 09b9b5d8..0ba8ba96 100644
--- a/test/reencodarr/sync_performance_test.exs
+++ b/test/reencodarr/sync_performance_test.exs
@@ -164,7 +164,7 @@ defmodule Reencodarr.SyncPerformanceTest do
         end)
 
         # Simulate analyzer dispatch (our optimization)
-        if length(files_needing_analysis) > 0 do
+        if not Enum.empty?(files_needing_analysis) do
          AnalyzerBroadway.dispatch_available()
         end
       end)

From d7068ed7040e2fa7a29ab42b2642036de2abca37 Mon Sep 17 00:00:00 2001
From: Mika Cohen
Date: Thu, 11 Dec 2025 11:09:28 -0700
Subject: [PATCH 11/11] Use existing Retry.retry_on_db_busy instead of custom retry logic

As per PR feedback, replace the custom retry_state_transition/4 function with
the existing Reencodarr.Core.Retry.retry_on_db_busy/2 that provides
exponential backoff, jitter, and consistent error handling across the
codebase.
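For context, a minimal sketch of what a retry_on_db_busy/2 helper along these
lines could look like. This is illustrative only: the RetrySketch module name,
the option defaults, the 5s cap, and the jitter formula are assumptions rather
than the actual Reencodarr.Core.Retry implementation; only the option names
(max_attempts, base_backoff_ms) are taken from the call sites in the diff
below.

    # Illustrative sketch; the real Reencodarr.Core.Retry module may differ.
    defmodule RetrySketch do
      @moduledoc false

      # Runs `fun`, retrying on SQLite "Database busy" errors with
      # exponential backoff plus jitter, up to `max_attempts` tries.
      def retry_on_db_busy(fun, opts \\ []) do
        max_attempts = Keyword.get(opts, :max_attempts, 10)
        base_backoff = Keyword.get(opts, :base_backoff_ms, 50)
        attempt(fun, 1, max_attempts, base_backoff)
      end

      defp attempt(fun, n, max, base) do
        fun.()
      rescue
        error in [Exqlite.Error] ->
          if error.message =~ "Database busy" and n < max do
            # Exponential backoff capped at 5s, plus random jitter
            backoff = min(round(:math.pow(2, n - 1) * base), 5_000)
            Process.sleep(backoff + :rand.uniform(backoff))
            attempt(fun, n + 1, max, base)
          else
            {:error, error}
          end
      end
    end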
---
 lib/reencodarr/analyzer/broadway.ex | 51 ++++-------------------------
 1 file changed, 7 insertions(+), 44 deletions(-)

diff --git a/lib/reencodarr/analyzer/broadway.ex b/lib/reencodarr/analyzer/broadway.ex
index c382e607..34e91cc8 100644
--- a/lib/reencodarr/analyzer/broadway.ex
+++ b/lib/reencodarr/analyzer/broadway.ex
@@ -17,6 +17,7 @@ defmodule Reencodarr.Analyzer.Broadway do
     Processing.Pipeline
   }
 
+  alias Reencodarr.Core.Retry
   alias Reencodarr.Dashboard.Events
   alias Reencodarr.Media
   alias Reencodarr.Media.{Codecs, Video}
@@ -606,9 +607,10 @@ defmodule Reencodarr.Analyzer.Broadway do
       Logger.debug("Video #{video.path} #{reason}, marking as encoded (skipping all processing)")
 
       result =
-        retry_state_transition(
+        Retry.retry_on_db_busy(
           fn -> Media.mark_as_encoded(video) end,
-          video.path
+          max_attempts: 10,
+          base_backoff_ms: 50
         )
 
       case result do
@@ -644,9 +646,10 @@ defmodule Reencodarr.Analyzer.Broadway do
     # Validate video has required fields before marking as analyzed
     if has_required_mediainfo_fields?(video) do
       result =
-        retry_state_transition(
+        Retry.retry_on_db_busy(
           fn -> Media.mark_as_analyzed(video) end,
-          video.path
+          max_attempts: 10,
+          base_backoff_ms: 50
         )
 
       case result do
@@ -776,44 +779,4 @@ defmodule Reencodarr.Analyzer.Broadway do
     # Return empty list to indicate failure - calling code should handle this
     []
   end
-
-  # Retry state transition with exponential backoff for database busy errors
-  defp retry_state_transition(fun, video_path, max_retries \\ 10) do
-    retry_state_transition(fun, video_path, max_retries, 0)
-  end
-
-  defp retry_state_transition(fun, video_path, max_retries, attempt)
-       when attempt < max_retries do
-    fun.()
-  rescue
-    error in [Exqlite.Error] ->
-      case error.message do
-        "Database busy" ->
-          # Exponential backoff with max cap at 5 seconds
-          base_wait = (:math.pow(2, attempt) * 50) |> round()
-          wait_time = min(base_wait, 5_000)
-
-          Logger.debug(
-            "Database busy updating #{video_path} on attempt #{attempt + 1}/#{max_retries}, retrying in #{wait_time}ms"
-          )
-
-          Process.sleep(wait_time)
-          retry_state_transition(fun, video_path, max_retries, attempt + 1)
-
-        _ ->
-          {:error, error}
-      end
-
-    other_error ->
-      {:error, other_error}
-  end
-
-  defp retry_state_transition(_fun, video_path, max_retries, attempt)
-       when attempt >= max_retries do
-    Logger.error(
-      "Failed to update state for #{video_path} after #{max_retries} attempts due to database busy"
-    )
-
-    {:error, :database_busy}
-  end
 end