@@ -187,7 +187,7 @@ def _decode_argument(value, default=None):
187187 return value
188188
189189
190- def _normalize_path (path ):
190+ def _get_abspath (path ):
191191 if not path :
192192 return ""
193193 return os .path .abspath (path )
@@ -269,7 +269,7 @@ def _summarize_probe(probe_data, file_path):
269269 stream = _video_stream_from_probe (probe_data )
270270 format_info = probe_data .get ("format" , {})
271271 return {
272- "path" : _normalize_path (file_path ),
272+ "path" : _get_abspath (file_path ),
273273 "codec_name" : stream .get ("codec_name" ),
274274 "codec_long_name" : stream .get ("codec_long_name" ),
275275 "profile" : stream .get ("profile" ),
@@ -388,7 +388,7 @@ def _downsample_frames(frame_scores, max_points):
388388
389389
390390def _build_analysis_paths (encoded_path , task_id ):
391- cache_dir = os .path .dirname (_normalize_path (encoded_path ))
391+ cache_dir = os .path .dirname (_get_abspath (encoded_path ))
392392 os .makedirs (cache_dir , exist_ok = True )
393393 task_label = task_id or "task"
394394 return {
@@ -530,8 +530,8 @@ def _run_vmaf_audit_child(
530530 "output_video" : _summarize_probe (encoded_probe , encoded_path ),
531531 "vmaf_summary" : summary ,
532532 "vmaf_frames" : frame_scores ,
533- "source_abspath" : _normalize_path (source_path ),
534- "analyzed_abspath" : _normalize_path (encoded_path ),
533+ "source_abspath" : _get_abspath (source_path ),
534+ "analyzed_abspath" : _get_abspath (encoded_path ),
535535 "captured_at" : datetime .datetime .utcnow ().isoformat () + "Z" ,
536536 }
537537 _write_json_file (result_path , result_payload )
@@ -548,8 +548,8 @@ def _run_vmaf_audit_child(
548548 "output_video" : {},
549549 "vmaf_summary" : {},
550550 "vmaf_frames" : [],
551- "source_abspath" : _normalize_path (source_path ),
552- "analyzed_abspath" : _normalize_path (encoded_path ),
551+ "source_abspath" : _get_abspath (source_path ),
552+ "analyzed_abspath" : _get_abspath (encoded_path ),
553553 "captured_at" : datetime .datetime .utcnow ().isoformat () + "Z" ,
554554 }
555555 _write_json_file (result_path , failure_payload )
@@ -769,7 +769,7 @@ def get_record_detail(self, record_id):
769769def _build_destination_file_data (destination_files ):
770770 results = []
771771 for path in destination_files or []:
772- abspath = _normalize_path (path )
772+ abspath = _get_abspath (path )
773773 item = {
774774 "path" : abspath ,
775775 "basename" : os .path .basename (abspath ),
@@ -900,9 +900,30 @@ def _build_overall_status(data, analysis_success):
900900 return "processing_failed" if analysis_success else "processing_failed_without_vmaf"
901901
902902
903+ def _same_path_warning (source_path , encoded_path ):
904+ return (
905+ f"[{ PLUGIN_ID } ] WARNING: Skipping VMAF audit because the worker input file "
906+ f"matches the original source file.\n "
907+ f"[{ PLUGIN_ID } ] original_file_path='{ source_path } '\n "
908+ f"[{ PLUGIN_ID } ] file_in='{ encoded_path } '\n "
909+ f"[{ PLUGIN_ID } ] This usually indicates the library or worker pipeline is "
910+ f"configured such that no transformed cache file was produced before this "
911+ f"plugin ran.\n "
912+ f"[{ PLUGIN_ID } ] Comparing a file against itself would produce meaningless "
913+ f"VMAF results, so this plugin will not run for this task."
914+ )
915+
916+
903917def _persist_audit_record (task_data_store , data ):
904918 analysis = task_data_store .get_task_state (STATE_KEY_RESULT , default = {}) or {}
905- source_path = _normalize_path (data .get ("source_data" , {}).get ("abspath" ))
919+ if not analysis :
920+ logger .info (
921+ "No VMAF analysis state found for task '%s'. Skipping audit record persistence." ,
922+ data .get ("task_id" ),
923+ )
924+ return None
925+
926+ source_path = _get_abspath (data .get ("source_data" , {}).get ("abspath" ))
906927 destination_files = _build_destination_file_data (_event_destination_files (data ))
907928 finish_time = (
908929 datetime .datetime .fromtimestamp (data ["finish_time" ])
@@ -931,13 +952,13 @@ def _persist_audit_record(task_data_store, data):
931952 or f"Task { data .get ('task_id' )} " ,
932953 "source_abspath" : source_path ,
933954 "source_basename" : os .path .basename (source_path ),
934- "analyzed_abspath" : _normalize_path (
955+ "analyzed_abspath" : _get_abspath (
935956 analysis .get ("analyzed_abspath" ) or data .get ("final_cache_path" )
936957 ),
937958 "analyzed_basename" : os .path .basename (
938959 analysis .get ("analyzed_abspath" ) or data .get ("final_cache_path" ) or ""
939960 ),
940- "final_cache_path" : _normalize_path (data .get ("final_cache_path" )),
961+ "final_cache_path" : _get_abspath (data .get ("final_cache_path" )),
941962 "destination_files_json" : _safe_json_dumps (destination_files ),
942963 "task_processing_success" : bool (data .get ("task_success" )),
943964 "file_move_processes_success" : bool (data .get ("file_move_processes_success" )),
@@ -1080,8 +1101,20 @@ def child_work(source_path, log_queue=None, prog_queue=None):
10801101 )
10811102 return
10821103
1083- source_path = _normalize_path (data .get ("original_file_path" ))
1084- encoded_path = _normalize_path (data .get ("file_in" ))
1104+ source_path = _get_abspath (data .get ("original_file_path" ))
1105+ encoded_path = _get_abspath (data .get ("file_in" ))
1106+ if source_path and encoded_path and source_path == encoded_path :
1107+ warning_message = _same_path_warning (source_path , encoded_path )
1108+ logger .warning (
1109+ "Skipping VMAF audit for task '%s' because original_file_path and file_in resolved to the same path: %s" ,
1110+ data .get ("task_id" ),
1111+ source_path ,
1112+ )
1113+ data .get ("worker_log" , []).append (f"\n { warning_message } \n " )
1114+ if task_data_store is not None :
1115+ task_data_store .set_task_state (STATE_KEY_RESULT , None )
1116+ return
1117+
10851118 result = {
10861119 "analysis_success" : False ,
10871120 "analysis_error" : "" ,
@@ -1135,7 +1168,9 @@ def emit_postprocessor_complete(data, task_data_store=None):
11351168 return
11361169
11371170 record_id = _persist_audit_record (task_data_store , data )
1138- if record_id is None :
1171+ if record_id is None and task_data_store .get_task_state (
1172+ STATE_KEY_RESULT , default = None
1173+ ):
11391174 logger .error (
11401175 "Failed writing VMAF audit result for task '%s'." , data .get ("task_id" )
11411176 )