
from __future__ import annotations

from multiprocessing.pool import ThreadPool
from pathlib import Path
from typing import Any

import numpy as np
import torch
import torch.nn.functional as F

from ultralytics.models.yolo.detect import DetectionValidator
from ultralytics.utils import LOGGER, NUM_THREADS, ops
from ultralytics.utils.checks import check_requirements
from ultralytics.utils.metrics import SegmentMetrics, mask_iou


class SegmentationValidator(DetectionValidator):
    """
    A class extending the DetectionValidator class for validation based on a segmentation model.

    This validator handles the evaluation of segmentation models, processing both bounding box and mask predictions
    to compute metrics such as mAP for both detection and segmentation tasks.

    Attributes:
        plot_masks (list): List to store masks for plotting.
        process (callable): Function to process masks based on save_json and save_txt flags.
        args (namespace): Arguments for the validator.
        metrics (SegmentMetrics): Metrics calculator for segmentation tasks.
        stats (dict): Dictionary to store statistics during validation.

    Examples:
        >>> from ultralytics.models.yolo.segment import SegmentationValidator
        >>> args = dict(model="yolo11n-seg.pt", data="coco8-seg.yaml")
        >>> validator = SegmentationValidator(args=args)
        >>> validator()
    """

    def __init__(self, dataloader=None, save_dir=None, args=None, _callbacks=None) -> None:
        """
        Initialize SegmentationValidator and set task to 'segment', metrics to SegmentMetrics.

        Args:
            dataloader (torch.utils.data.DataLoader, optional): Dataloader to use for validation.
            save_dir (Path, optional): Directory to save results.
            args (namespace, optional): Arguments for the validator.
            _callbacks (list, optional): List of callback functions.
        """
        super().__init__(dataloader, save_dir, args, _callbacks)
        self.process = None
        self.args.task = "segment"
        self.metrics = SegmentMetrics()

    def preprocess(self, batch: dict[str, Any]) -> dict[str, Any]:
        """
        Preprocess batch of images for YOLO segmentation validation.

        Args:
            batch (dict[str, Any]): Batch containing images and annotations.

        Returns:
            (dict[str, Any]): Preprocessed batch.
        """
        batch = super().preprocess(batch)
        batch["masks"] = batch["masks"].float()
        return batch
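    # The segmentation batch carries the same keys as the detection batch ("img", "cls", "bboxes",
    # "batch_idx") plus "masks"; the float cast above is what lets ground-truth masks be compared
    # against predicted masks with mask_iou further down. (Key names as used elsewhere in this
    # validator; exact dataloader contents depend on the dataset configuration.)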

    def init_metrics(self, model: torch.nn.Module) -> None:
        """
        Initialize metrics and select mask processing function based on save_json flag.

        Args:
            model (torch.nn.Module): Model to validate.
        """
        super().init_metrics(model)
        if self.args.save_json:
            check_requirements("faster-coco-eval>=1.6.7")
        # Use the more accurate native mask processing when results are exported, otherwise the faster variant
        self.process = ops.process_mask_native if self.args.save_json or self.args.save_txt else ops.process_mask
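    # The two helpers differ in where upsampling happens: ops.process_mask decodes masks at prototype
    # resolution and is faster, while ops.process_mask_native upsamples to network input size before
    # cropping, which is slower but pixel-accurate; hence the native path when masks are written out
    # via save_json/save_txt. (See ultralytics.utils.ops for the authoritative behavior; this comment
    # is only a summary.)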
        zfaster-coco-eval>=1.6.7N)
r   init_metricsr   	save_jsonr   save_txtr
   process_mask_nativeprocess_maskr   )r   modelr   s     r   r%   z"SegmentationValidator.init_metricsE   sZ     	U#998926))2E2EI[I[s..adaqaqr   c                    ddz  S )z5Return a formatted description of evaluation metrics.z,%22s%11s%11s%11s%11s%11s%11s%11s%11s%11s%11s)ClassImages	InstanceszBox(PRmAP50	mAP50-95)zMask(Pr/   r0   r1    )r   s    r   get_desczSegmentationValidator.get_descR   s    $ )
 
 	
r   c                   t        |d         dk(  r|d   d   n|d   }t        | 	  |d         }|j                  dd D cg c]  }d|z  	 }}t	        |      D ]  \  }}|j                  d      }|j                  d   r| j                  ||   ||d	   |
      nat        j                  dg| j                  t        j                  u r|n|j                  dd t        j                  |d	   j                        |d<    |S c c}w )a  
        Post-process YOLO predictions and return output detections with proto.

        Args:
            preds (list[torch.Tensor]): Raw predictions from the model.

        Returns:
            list[dict[str, torch.Tensor]]: Processed detection predictions with masks.
        """
        proto = preds[1][-1] if len(preds[1]) == 3 else preds[1]  # second output is len 3 if pt, but only 1 if exported
        preds = super().postprocess(preds[0])
        imgsz = [4 * x for x in proto.shape[2:]]  # get image size from proto
        for i, pred in enumerate(preds):
            coefficient = pred.pop("extra")  # mask coefficients are carried in the "extra" field
            pred["masks"] = (
                self.process(proto[i], coefficient, pred["bboxes"], shape=imgsz)
                if coefficient.shape[0]
                else torch.zeros(
                    (0, *(imgsz if self.process is ops.process_mask_native else proto.shape[2:])),
                    dtype=torch.uint8,
                    device=pred["bboxes"].device,
                )
            )
        return preds

    def _prepare_batch(self, si: int, batch: dict[str, Any]) -> dict[str, Any]:
        """
        Prepare a batch for training or inference by processing images and targets.

        Args:
            si (int): Batch index.
            batch (dict[str, Any]): Batch data containing images and annotations.

        Returns:
            (dict[str, Any]): Prepared batch with processed annotations.
        """
        prepared_batch = super()._prepare_batch(si, batch)
        nl = prepared_batch["cls"].shape[0]
        if self.args.overlap_mask:
            masks = batch["masks"][si]
            index = torch.arange(1, nl + 1, device=masks.device).view(nl, 1, 1)
            masks = (masks == index).float()
        else:
            masks = batch["masks"][batch["batch_idx"] == si]
        if nl:
            mask_size = [s if self.process is ops.process_mask_native else s // 4 for s in prepared_batch["imgsz"]]
            if masks.shape[1:] != tuple(mask_size):
                masks = F.interpolate(masks[None], mask_size, mode="bilinear", align_corners=False)[0]
                masks = masks.gt_(0.5)
        prepared_batch["masks"] = masks
        return prepared_batch
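    # With overlap_mask enabled the dataloader stores every instance of an image in one index-encoded
    # map (pixel value i marks instance i), so the arange/view comparison above expands it back into a
    # binary mask per instance; otherwise per-instance masks are selected directly through batch_idx.
    # The divide-by-4 mirrors the prototype resolution used by the non-native mask decoder.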

    def _process_batch(self, preds: dict[str, torch.Tensor], batch: dict[str, Any]) -> dict[str, np.ndarray]:
        """
        Compute correct prediction matrix for a batch based on bounding boxes and optional masks.

        Args:
            preds (dict[str, torch.Tensor]): Dictionary containing predictions with keys like 'cls' and 'masks'.
            batch (dict[str, Any]): Dictionary containing batch data with keys like 'cls' and 'masks'.

        Returns:
            (dict[str, np.ndarray]): A dictionary containing correct prediction matrices including 'tp_m' for mask IoU.

        Notes:
            - If `masks` is True, the function computes IoU between predicted and ground truth masks.
            - If `overlap` is True and `masks` is True, overlapping masks are taken into account when computing IoU.

        Examples:
            >>> preds = {"cls": torch.tensor([1, 0]), "masks": torch.rand(2, 640, 640), "bboxes": torch.rand(2, 4)}
            >>> batch = {"cls": torch.tensor([1, 0]), "masks": torch.rand(2, 640, 640), "bboxes": torch.rand(2, 4)}
            >>> correct_preds = validator._process_batch(preds, batch)
        """
        tp = super()._process_batch(preds, batch)
        gt_cls, gt_masks = batch["cls"], batch["masks"]
        if gt_cls.shape[0] == 0 or preds["cls"].shape[0] == 0:
            tp_m = np.zeros((preds["cls"].shape[0], self.niou), dtype=bool)
        else:
            iou = mask_iou(gt_masks.flatten(1), preds["masks"].flatten(1))
            tp_m = self.match_predictions(preds["cls"], gt_cls, iou).cpu().numpy()
        tp.update({"tp_m": tp_m})  # add mask true positives alongside the box results
        return tp

    def plot_predictions(self, batch: dict[str, Any], preds: list[dict[str, torch.Tensor]], ni: int) -> None:
        """
        Plot batch predictions with masks and bounding boxes.

        Args:
            batch (dict[str, Any]): Batch containing images and annotations.
            preds (list[dict[str, torch.Tensor]]): List of predictions from the model.
            ni (int): Batch index.
        """
        for p in preds:
            masks = p["masks"]
            if masks.shape[0] > self.args.max_det:
                LOGGER.warning(f"Limiting validation plots to 'max_det={self.args.max_det}' items.")
            p["masks"] = torch.as_tensor(masks[: self.args.max_det], dtype=torch.uint8).cpu()
        super().plot_predictions(batch, preds, ni, max_det=self.args.max_det)  # plot bboxes on top of masks

    def save_one_txt(self, predn: torch.Tensor, save_conf: bool, shape: tuple[int, int], file: Path) -> None:
        """
        Save YOLO detections to a txt file in normalized coordinates in a specific format.

        Args:
            predn (torch.Tensor): Predictions in the format (x1, y1, x2, y2, conf, class).
            save_conf (bool): Whether to save confidence scores.
            shape (tuple[int, int]): Shape of the original image.
            file (Path): File path to save the detections.
        """
        from ultralytics.engine.results import Results

        Results(
            np.zeros((shape[0], shape[1]), dtype=np.uint8),
            path=None,
            names=self.names,
            boxes=torch.cat([predn["bboxes"], predn["conf"].unsqueeze(-1), predn["cls"].unsqueeze(-1)], dim=1),
            masks=torch.as_tensor(predn["masks"], dtype=torch.uint8),
        ).save_txt(file, save_conf=save_conf)

    def pred_to_json(self, predn: dict[str, torch.Tensor], pbatch: dict[str, Any]) -> None:
        """
        Save one JSON result for COCO evaluation.

        Args:
            predn (dict[str, torch.Tensor]): Predictions containing bboxes, masks, confidence scores, and classes.
            pbatch (dict[str, Any]): Batch dictionary containing 'imgsz', 'ori_shape', 'ratio_pad', and 'im_file'.
        """
        from faster_coco_eval.core.mask import encode  # noqa

        def single_encode(x):
            """Encode predicted masks as RLE and append results to jdict."""
            rle = encode(np.asarray(x[:, :, None], order="F", dtype="uint8"))[0]
            rle["counts"] = rle["counts"].decode("utf-8")
            return rle

        pred_masks = np.transpose(predn["masks"], (2, 0, 1))
        with ThreadPool(NUM_THREADS) as pool:
            rles = pool.map(single_encode, pred_masks)
        super().pred_to_json(predn, pbatch)
        for i, r in enumerate(rles):
            self.jdict[-len(rles) + i]["segmentation"] = r  # attach RLE masks to the box entries just written

    def scale_preds(self, predn: dict[str, torch.Tensor], pbatch: dict[str, Any]) -> dict[str, torch.Tensor]:
        """Scales predictions to the original image size."""
        return {
            **super().scale_preds(predn, pbatch),
            "masks": ops.scale_image(
                torch.as_tensor(predn["masks"], dtype=torch.uint8).permute(1, 2, 0).contiguous().cpu().numpy(),
                pbatch["ori_shape"],
                ratio_pad=pbatch["ratio_pad"],
            ),
        }

    def eval_json(self, stats: dict[str, Any]) -> dict[str, Any]:
        """Return COCO-style instance segmentation evaluation metrics."""
        pred_json = self.save_dir / "predictions.json"  # predictions
        anno_json = (
            self.data["path"]
            / "annotations"
            / ("instances_val2017.json" if self.is_coco else f"lvis_v1_{self.args.split}.json")
        )  # annotations
        return super().coco_evaluate(stats, pred_json, anno_json, ["bbox", "segm"], suffix=["Box", "Mask"])
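

# Minimal standalone sketch of how this validator is typically driven, mirroring the Examples block in
# the class docstring above; the weight and dataset names ("yolo11n-seg.pt", "coco8-seg.yaml") are the
# illustrative values used there, not requirements of this module.
if __name__ == "__main__":
    args = dict(model="yolo11n-seg.pt", data="coco8-seg.yaml")
    validator = SegmentationValidator(args=args)
    validator()  # runs validation and reports the box/mask metrics formatted by get_desc()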