Autodiff #77

Merged (30 commits) on Mar 12, 2022

Changes from 1 commit

Commits (30):
e10f137  Begin initialising autodiff submodule (JHay0112, Jan 29, 2022)
9d1a758  Start implementing normal(ish) function behaviour (JHay0112, Feb 5, 2022)
4a01a26  Merge branch 'autodiff' of https://github.com/JHay0112/jmath into aut… (JHay0112, Feb 5, 2022)
1803d8d  Functions mostly working classicly, order broken (JHay0112, Feb 5, 2022)
84b271f  Start work on differentiation code (JHay0112, Feb 5, 2022)
a58f194  Refined approach, still needs tweaked (JHay0112, Feb 22, 2022)
0a94dc4  Another broken implementation (JHay0112, Feb 25, 2022)
2227270  IT WORKS! (Mostly) (JHay0112, Feb 27, 2022)
fe31db8  Power handling (JHay0112, Feb 27, 2022)
cad346e  Implement some common derivatives (JHay0112, Mar 3, 2022)
890ea13  Slightly more flexible function def (JHay0112, Mar 3, 2022)
08b5f0c  Analyse function (JHay0112, Mar 3, 2022)
c74e87d  Support multiple diff levels (JHay0112, Mar 3, 2022)
f18f25a  Clean up input setting (JHay0112, Mar 3, 2022)
8b50bc7  Predefine common variables (JHay0112, Mar 7, 2022)
8fd5456  Extend to include uppercase (JHay0112, Mar 7, 2022)
c7ec0d3  Seperate auto-differentiation package (JHay0112, Mar 10, 2022)
3ae09bb  Rebuild docs to match (JHay0112, Mar 10, 2022)
bba1958  Fix broken function derivatives (JHay0112, Mar 11, 2022)
65c3bf1  Add some more commonly needed derivatives (JHay0112, Mar 11, 2022)
f2d8614  Minor tweaks (JHay0112, Mar 11, 2022)
94bea30  Multi-layer differentiability (JHay0112, Mar 11, 2022)
570d32f  Clarify derivative definition (JHay0112, Mar 11, 2022)
fb475c3  Crappy function printing (JHay0112, Mar 11, 2022)
48f480f  Tack this on too (JHay0112, Mar 11, 2022)
b27b2e0  Short hand differentiation (JHay0112, Mar 11, 2022)
527c5aa  Start writing tests (JHay0112, Mar 11, 2022)
19fb46f  Minor differentiator tweaks (JHay0112, Mar 12, 2022)
762d257  Trigonometric tests (JHay0112, Mar 12, 2022)
382e19f  More autodiff tests! (JHay0112, Mar 12, 2022)
Changes from commit: Another broken implementation
JHay0112 committed Feb 25, 2022
commit 0a94dc4aa9c1b34e0d7e680f4fd12c7172fe6d6d
138 changes: 60 additions & 78 deletions jmath/approximation/autodiff.py
@@ -11,18 +11,32 @@
 # - Typing
 
 Supported = Union[int, float, Uncertainty, 'Function', 'Variable']
+Numeric = Union[int, float, Uncertainty]
 
 # - Classes
 
-class Variable:
+class Function:
     '''
-    Allows a function to be applied to a variable input.
+    Automatic Differentiation Function Object
+
+    Parameters
+    -----------
+
+    func
+        Represented function.
+    diff
+        Tuple of partial derivatives of the function.
     '''
-    def __init__(self):
+    def __init__(self, func: Callable, diff: Tuple[Callable]):
 
-        self.output_of = None
-        self.input_of = set()
-        self.value = None
+        self.inputs = []
+        self.func = func
+        self.diff = diff
+
+        # Check if diff is not a tuple
+        if not isinstance(diff, tuple):
+            # If not then we shall make it one
+            self.diff = (diff,)
 
     def __add__(self, other: Supported) -> 'Function':
 
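This hunk folds the old Variable bookkeeping into a single Function node that stores a forward callable plus one partial derivative per positional argument, promoting a lone derivative callable into a 1-tuple. Below is a minimal standalone sketch of that constructor behaviour, illustrative only and not the jmath API itself:

```python
import operator as op
from typing import Callable, Tuple

class Function:
    '''Standalone sketch of the node type introduced above; not the jmath class itself.'''
    def __init__(self, func: Callable, diff: Tuple[Callable, ...]):
        self.inputs = []        # upstream nodes, filled in later by register()
        self.func = func        # the forward computation
        self.diff = diff        # one partial derivative per positional argument
        # A single callable is promoted to a 1-tuple so diff is always indexable
        if not isinstance(diff, tuple):
            self.diff = (diff,)

# Two-argument node: addition, with partials d(x+y)/dx = 1 and d(x+y)/dy = 1
add_node = Function(op.add, (lambda x, y: 1, lambda x, y: 1))
# One-argument node: the bare lambda derivative gets wrapped into a tuple
scale_node = Function(lambda x: 3 * x, lambda x: 3)
assert isinstance(scale_node.diff, tuple) and len(scale_node.diff) == 1
```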
@@ -31,7 +45,9 @@ def __add__(self, other: Supported) -> 'Function':
             return self
         elif isinstance(other, (int, float, Uncertainty)):
             # Numeric case
-            return Function(lambda x: x + other, lambda x: 1)
+            f = Function(lambda x: x + other, lambda x: 1)
+            f.register(self)
+            return f
         else:
             # Variable case
             return Function(op.add, (lambda x, y: 1, lambda x, y: 1))
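The change in the numeric branch is what wires the graph together: instead of returning an unconnected wrapper, the new node for "x plus a constant" records self as its input via register(). A hedged sketch of that pattern with a hypothetical Node class (not the jmath class):

```python
class Node:
    '''Illustrative stand-in for the Function class above.'''
    def __init__(self, func, diff):
        self.func = func
        self.diff = diff if isinstance(diff, tuple) else (diff,)
        self.inputs = []

    def register(self, *inputs):
        self.inputs = list(inputs)

    def __add__(self, other):
        if isinstance(other, (int, float)):
            f = Node(lambda x: x + other, lambda x: 1)  # d/dx (x + c) = 1
            f.register(self)                            # remember where x came from
            return f
        return NotImplemented

x = Node(lambda v: v, lambda v: 1)  # identity node standing in for a Variable
y = x + 2
assert y.inputs == [x]              # the sum node now knows its upstream operand
```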
@@ -68,7 +84,9 @@ def __mul__(self, other: Supported) -> 'Function':
             return self
         elif isinstance(other, (int, float, Uncertainty)):
             # Numeric case
-            return Function(lambda x: other * x, lambda x, y: other)
+            f = Function(lambda x: other * x, lambda x, y: other)
+            f.register(self)
+            return f
         else:
             # Variable case
             return Function(op.mul, (lambda x, y: y, lambda x, y: x))
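For the variable case, the tuple passed alongside op.mul encodes the product rule: for h(x, y) = x * y, dh/dx = y and dh/dy = x. A quick standalone check of that correspondence against a central-difference estimate (illustrative only):

```python
import operator as op

# Mirrors the tuple in the diff: (lambda x, y: y, lambda x, y: x)
partials = (lambda x, y: y, lambda x, y: x)

def numeric_partial(f, args, i, h=1e-6):
    '''Central-difference estimate of the i-th partial derivative of f at args.'''
    up = list(args); up[i] += h
    down = list(args); down[i] -= h
    return (f(*up) - f(*down)) / (2 * h)

x, y = 3.0, 5.0
assert abs(partials[0](x, y) - numeric_partial(op.mul, (x, y), 0)) < 1e-6   # dh/dx = y = 5
assert abs(partials[1](x, y) - numeric_partial(op.mul, (x, y), 1)) < 1e-6   # dh/dy = x = 3
```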
@@ -101,83 +119,47 @@ def __rtruediv__(self, other: Supported) -> 'Function':
             # Variable case
             return Function(op.rtruediv, (lambda x, y: 1/y, lambda x, y: -x/(y**2)))
 
-class Function(Variable):
-    '''
-    A Differentiable Function.
-
-    Parameters
-    ----------
-
-    func
-        The callable function to be represented.
-    diff
-        A tuple of functions produced upon differentiation with respect to the function variables.
-    '''
-    def __init__(self, func: Callable, diff: Tuple[Callable]):
-
-        self.vars = []
-        self.func = func
-        self.diff = diff
-        self.output = Variable()
-        self.output.output_of = self
-
-        # Check if diff is not a tuple
-        if not isinstance(diff, tuple):
-            # If not then we shall make it one
-            self.diff = (diff,)
-
-    def __call__(self, *inputs: Union[int, float, Uncertainty, Variable, 'Function']) -> Union[int, float, Uncertainty, 'Function']:
-        '''Evaluate the function.'''
-
-        if len(inputs) == 0:
-            # Call inner functions
-            for var in self.vars:
-                if var.output_of is not None:
-                    var.output_of()
-            # Use var values
-            inputs = tuple(var.value for var in self.vars)
-            # Now execute the function
-            self.output.value = self.func(*inputs)
-            return self.output.value
-        elif all(isinstance(input, (Variable, Function)) for input in inputs):
-            # If all inputs are functions/variables
-            # Clear current inputs
-            self.vars = []
-            # Then let's register the inputs
-            for input in inputs:
-                if isinstance(input, Variable):
-                    self.vars.append(input)
-                    input.input_of.add(self)
-                elif isinstance(input, Function):
-                    self.vars.append(input.output)
-                    input.output.input_of.add(self)
-            # Let's return the function for use
-            return self
-        else:
-            # Real inputs
-            # Evaluate the function
-            return self.func(*inputs)
+    def register(self, *inputs: 'Function', clear: bool = True):
+        '''
+        Registers inputs to the function.
+
+        Parameters
+        ----------
+
+        inputs
+            Args, the functions to register as inputs.
+        clear
+            Clear function inputs before registering.
+        '''
+        if clear:
+            self.inputs = []
+        for input in inputs:
+            self.inputs.append(input)
 
-    def differentiate(self, var: Variable) -> 'Function':
+    def differentiate(self, wrt: 'Variable') -> Callable:
         '''
-        Produces the partial differential of the function with respect to the specified variable.
+        Differentiates the function with respect to a variable.
 
         Parameters
         ----------
 
-        var
-            The variable to differentiate with repsect to.
+        wrt
+            The variable to differentiate with respect to.
         '''
-        # Derivative produced
+        # The differentiated function
        func = 0
-        # Go down each 'branch'
-        for i, input_var in enumerate(self.vars):
-            branch = self.diff[i]
-            # Check if it is 'owned' by a function
-            if input_var.output_of is not None:
-                # Then derive that function by the same variable and add it
-                func += branch * input_var.output_of.differentiate(var)
-            else:
-                func = branch
-
-        return func
+        # Move across inputs
+        for i, input in enumerate(self.inputs):
+            # Get respective derivative
+            partial = Function(self.diff[i], lambda x: 1)
+            func += partial * input.differentiate(wrt)
+
+        return func
+
+class Variable(Function):
+    '''
+    Variables for function differentiation.
+    '''
+    def __init__(self):
+
+        super().__init__(lambda x: x, lambda x: 1)
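For reference, the rule differentiate() is accumulating here (this is still the "broken" iteration, per the commit message) is the multivariate chain rule: for f(g1(x), ..., gn(x)), df/dx = sum over i of (partial of f with respect to g_i, evaluated at the inputs) * (dg_i/dx). Below is a self-contained numeric, rather than symbolic, sketch of that accumulation, using hypothetical Node names rather than the jmath API:

```python
from typing import Callable, List, Tuple

class Node:
    '''Hypothetical sketch node; mirrors the structure above but evaluates numerically.'''
    def __init__(self, func: Callable, partials: Tuple[Callable, ...] = ()):
        self.func = func
        self.partials = partials              # one partial derivative per input
        self.inputs: List["Node"] = []

    def register(self, *inputs: "Node") -> None:
        self.inputs = list(inputs)

    def value(self) -> float:
        return self.func(*(i.value() for i in self.inputs))

    def derivative(self, wrt: "Node") -> float:
        if self is wrt:
            return 1.0                        # d x / d x = 1
        vals = [i.value() for i in self.inputs]
        # Chain rule: sum of (local partial at the inputs) * (input's derivative w.r.t. wrt)
        return sum(p(*vals) * i.derivative(wrt)
                   for p, i in zip(self.partials, self.inputs))

# f(x) = x * x, so df/dx = 2x, i.e. 6 at x = 3
x = Node(lambda: 3.0)                          # leaf holding the value 3.0
f = Node(lambda a, b: a * b, (lambda a, b: b, lambda a, b: a))
f.register(x, x)
assert f.value() == 9.0
assert f.derivative(x) == 6.0
```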