Coverage for local_installation_linux/mumott/optimization/regularizers/l1_norm.py: 87%

33 statements  

« prev     ^ index     » next       coverage.py v7.3.2, created at 2024-08-11 23:08 +0000

1import numpy as np 

2from numpy.typing import NDArray 

3 

4from mumott.optimization.regularizers.base_regularizer import Regularizer 

5import logging 

6logger = logging.getLogger(__name__) 

7 

8 

class L1Norm(Regularizer):

    r"""Regularizes using the :math:`L_1` norm of the coefficient vector, also known as the
    Manhattan or taxicab norm.
    Suitable for scalar fields or tensor fields in local representations. Tends to reduce noise.

    The :math:`L_1` norm of a vector :math:`x` is given by :math:`\sum{\vert x \vert}`.

    See also `this Wikipedia article <https://en.wikipedia.org/wiki/Taxicab_geometry>`_.
    """

    def __init__(self):
        super().__init__()

    def get_regularization_norm(self,
                                coefficients: NDArray[float],
                                get_gradient: bool = False,
                                gradient_part: str = None) -> dict[str, NDArray[float]]:
        """Retrieves the :math:`L_1` norm, also called the Manhattan or taxicab norm, of the
        coefficients. Appropriate for use with scalar fields or tensor fields in local basis sets.

        Parameters
        ----------
        coefficients
            An ``np.ndarray`` of values, with shape ``(X, Y, Z, W)``, where
            the last channel contains, e.g., tensor components.
        get_gradient
            If ``True``, returns a ``'gradient'`` of the same shape as :attr:`coefficients`.
            Otherwise the entry ``'gradient'`` will be ``None``. Defaults to ``False``.
        gradient_part
            Used for the zonal harmonics reconstructions to determine what part of the gradient is
            being calculated. Default is None. If a flag is passed in ('full', 'angles', 'coefficients'),
            we assume that the ZH workflow is used and that the last two coefficients are Euler angles,
            which should not be regularized by this regularizer.

        Returns
        -------
            A dictionary with two entries, ``regularization_norm`` and ``gradient``.

        Raises
        ------
        ValueError
            If :attr:`gradient_part` is not ``None`` nor one of
            ``'full'``, ``'coefficients'``, or ``'angles'``.
        """

        result = dict(regularization_norm=None, gradient=None)
        if get_gradient:
            if gradient_part is None:
                result['gradient'] = np.sign(coefficients)
            elif gradient_part in ('full', 'coefficients'):
                result['gradient'] = np.sign(coefficients)
                # The last two channels are Euler angles in the ZH workflow and
                # must not receive a regularization gradient.
                result['gradient'][..., -2:] = 0
            # NOTE: previously ``gradient_part in ('angles')`` — a *substring*
            # test against the string 'angles', which accepted e.g. 'angle' or ''
            # and made the else-branch unreachable. An equality check is intended.
            elif gradient_part == 'angles':
                result['gradient'] = np.zeros(coefficients.shape)
            else:
                logger.warning('Unexpected argument given for gradient part.')
                raise ValueError(f'Invalid gradient_part: {gradient_part!r}.')

        if gradient_part is None:
            result['regularization_norm'] = np.sum(np.abs(coefficients))
        elif gradient_part in ('full', 'coefficients', 'angles'):
            # Exclude the trailing Euler-angle channels from the norm.
            result['regularization_norm'] = np.sum(np.abs(coefficients[..., :-2]))
        else:
            logger.warning('Unexpected argument given for gradient part.')
            raise ValueError(f'Invalid gradient_part: {gradient_part!r}.')

        return result

    @property
    def _function_as_str(self) -> str:
        return 'R(x) = lambda * abs(x)'

    @property
    def _function_as_tex(self) -> str:
        return r'$R(\vec{x}) = \lambda \Vert \vec{x} \Vert_1$'