
from numpy import unique
from scipy.stats import entropy as scipy_entropy


def shannon_entropy(image, base=2):
    """Calculate the Shannon entropy of an image.

    The Shannon entropy is defined as S = -sum(pk * log(pk)),
    where pk is the frequency/probability of pixels of value k.

    Parameters
    ----------
    image : (M, N) ndarray
        Grayscale input image.
    base : float, optional
        The logarithmic base to use.

    Returns
    -------
    entropy : float
        The Shannon entropy of the image.

    Notes
    -----
    The returned value is measured in bits or shannon (Sh) for base=2, natural
    unit (nat) for base=np.e, and hartley (Hart) for base=10.

    References
    ----------
    .. [1] `https://en.wikipedia.org/wiki/Entropy_(information_theory) <https://en.wikipedia.org/wiki/Entropy_(information_theory)>`_
    .. [2] https://en.wiktionary.org/wiki/Shannon_entropy

    Examples
    --------
    >>> from skimage import data
    >>> from skimage.measure import shannon_entropy
    >>> shannon_entropy(data.camera())
    7.231695011055706

    """
    _, counts = unique(image, return_counts=True)
    return scipy_entropy(counts, base=base)