Coverage for /home/runner/work/AutoDiff/AutoDiff/autodiff/reversemode.py: 100%


Generated by Amelia Li for AutoDiff.

47 statements  

coverage.py v7.4.0, created at 2024-01-07 04:22 +0000

import inspect
import numpy as np

from autodiff.ad import AD
from autodiff.node import Node

class ReverseMode(AD):
    """Reverse mode implementation based on nodes."""

    @staticmethod
    def get_gradients(node):
        """
        Compute the derivatives of `node` with respect to its child nodes.

        Returns
        -------
        gradients : dict
            The derivative of `node` with respect to each child node,
            keyed by the child node.
        """
        gradients = {}

        def compute_gradients(node, v):
            # v is the adjoint: the derivative of the output with respect
            # to the current node, accumulated along the path so far
            for child, gradient in node.gradients:
                v_child = v * gradient
                gradients[child] = gradients.get(child, 0) + v_child
                compute_gradients(child, v_child)

        # seed the recursion with d(node)/d(node) = 1
        compute_gradients(node, 1)
        return gradients
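
    # Worked example for get_gradients (illustrative; assumes Node records
    # (child, local_derivative) pairs on its `gradients` attribute, as the
    # loop above reads them): if z = x * y with x = Node(3.0) and
    # y = Node(2.0), then z.gradients is [(x, 2.0), (y, 3.0)], and
    # get_gradients(z) returns {x: 2.0, y: 3.0}. A child reached along
    # several paths has its contributions summed via
    # gradients.get(child, 0) + v_child.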

    def get_results(self, x):
        """
        Compute the value(s) and derivative(s) of the function(s) at the input x.

        Parameters
        ----------
        x : scalar or vector
            The point at which the value(s) and derivative(s) of the
            function(s) are evaluated.

        Returns
        -------
        f(x) and f'(x)
            Both the value(s) and the derivative(s) of the function(s) at `x`.

        Raises
        ------
        TypeError
            If the type of the input x is not supported.
        ValueError
            If the dimension of the input x does not match the function(s).
        """

        # check that x is of a supported type
        if not isinstance(x, self._supported_vectors):
            raise TypeError(f"Unsupported type '{type(x)}'")

        # check that x is 1-dimensional
        if len(np.shape(x)) != 1:
            raise ValueError("Input variables should be 1-dimensional.")

        # convert x to a list
        x = list(x)

        # if there are multiple functions, build a Jacobian
        if self.jacobian:
            jacobian = []
            vals = []
            for f in self.f:
                # initialize the argument and node lists
                args = []
                nodes = []

                # get the names of the function's arguments
                function_args = inspect.getfullargspec(f)[0]

                # if an input variable is a function argument, wrap it in a
                # node and add it to the arguments list
                for i, var in enumerate(self.inputs):
                    if var in function_args:
                        node = Node(x[i])
                        args.append(node)
                        nodes.append(node)
                    # pad with 0 when the variable is not used in the function
                    else:
                        nodes.append(0)

                # unpack args and pass them into f
                z = f(*args)
                gradients = ReverseMode.get_gradients(z)
                vals.append(z.val)
                j = []

                # fill the Jacobian row with the results
                for i, var in enumerate(self.inputs):
                    if var in function_args:
                        j.append(gradients[nodes[i]])
                    else:
                        j.append(0)
                jacobian.append(np.array(j))

            return np.array([np.array(vals), jacobian], dtype=object)
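
        # Shape of the result built above (illustrative): for f = [f1, f2]
        # over inputs ["x", "y"], the first entry is array([f1(x, y),
        # f2(x, y)]) and the second is one gradient row per function, with
        # 0 in the positions of inputs a function does not use.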

103 # if there is one function 

104 else: 

105 # convert every element in args to a node 

106 args = [Node(arg) for arg in x] 

107 

108 # unpack args and pass into f 

109 z = self.f(*args) 

110 gradients = ReverseMode.get_gradients(z) 

111 return np.array([z.val, np.array([gradients[node] for node in args])], dtype = object)