Initial commit
This commit is contained in:
commit
a355c3105d
1
.gitignore
vendored
Normal file
1
.gitignore
vendored
Normal file
@ -0,0 +1 @@
|
||||
__pycache__
|
3
README.md
Normal file
3
README.md
Normal file
@ -0,0 +1,3 @@
|
||||
# Project Collapse
|
||||
Basically, I'm just playing around with the idea of solving neural networks for their minimal loss in closed form (without iteration).
|
||||
I do realize that this is impossible (because it violates physics), but I want to gain a better and more intuitive understanding of why it is impossible from a practical information perspective.
|
39
collapse.py
Normal file
39
collapse.py
Normal file
@ -0,0 +1,39 @@
|
||||
from sympy import *
|
||||
import math
|
||||
|
||||
class Model():
    """A feed-forward network whose forward pass is built symbolically.

    Layers are appended in order; collapse() pushes a vector of sympy
    input symbols through every layer after the first and returns the
    resulting symbolic expressions for the network's outputs.
    """

    def __init__(self):
        # Ordered list of Layer objects; layers[0] acts as the input layer.
        self.layers = []

    def append(self, layer):
        """Add *layer* after the current last layer, wiring its weights."""
        if len(self.layers) != 0:
            # Allocate symbolic weights connecting the new layer to the
            # previous one before it becomes part of the model.
            layer._connect(self.layers[-1])
        self.layers.append(layer)

    def collapse(self):
        """Return the symbolic output expressions of the whole network.

        Creates one input symbol per neuron of the first layer, then
        threads the activations through every subsequent layer.
        """
        # Build the symbols one at a time: sympy's symbols('x0') returns a
        # single Symbol rather than a tuple, so the original
        # symbols(' '.join(...)) form broke for a one-neuron input layer
        # (the lone Symbol is not indexable by Layer.activate).
        act = [symbols('x' + str(i)) for i in range(self.layers[0].length)]
        for layer in self.layers[1:]:
            act = layer.activate(act)
        return list(act)
|
||||
|
||||
class Layer():
    """One layer of the network; weights are allocated when connected."""

    def __init__(self, length):
        # Number of neurons in this layer; layNum is set on connection
        # (the input layer keeps layNum == 0).
        self.length = length
        self.layNum = 0

    def _connect(self, prev):
        """Attach this layer behind *prev* and create symbolic weights."""
        self.prev = prev
        self.layNum = prev.layNum + 1
        # One bias slot plus one weight per incoming neuron, per neuron.
        count = self.length * (prev.length + 1)
        self.weights = [symbols('w' + str(self.layNum) + '_' + str(w))
                        for w in range(count)]

    def activate(self, inp):
        """Map the previous layer's activations *inp* through this layer.

        Returns a list with one sigmoid expression/value per neuron.
        """
        stride = self.prev.length + 1
        fan_in = self.prev.length
        outputs = []
        for neuron in range(self.length):
            base = stride * neuron
            # The bias weight is scaled by the fan-in so that the later
            # division by fan_in normalises the inputs but not the bias.
            total = self.weights[base] * fan_in
            for i in range(fan_in):
                total = total + inp[i] * self.weights[base + 1 + i]
            # Logistic sigmoid of the fan-in-averaged pre-activation.
            outputs.append(1 / (1 + math.e ** (-total / fan_in)))
        return outputs
|
70
diySym.py
Normal file
70
diySym.py
Normal file
@ -0,0 +1,70 @@
|
||||
class Tracer():
    """Abstract base for lazily-evaluated arithmetic expression nodes.

    Do not instantiate directly; use BaseTracer for leaf values.  The
    arithmetic dunders build a CompTracer expression tree instead of
    computing eagerly; eval() on the tree performs the arithmetic.
    """

    def __init__(self):
        raise Exception("Use BaseTracer instead of Tracer")

    def _push(self, op, obj):
        # Wrap self and the other operand into a composite node for *op*.
        # CompTracer promotes non-Tracer operands to leaves itself.
        return CompTracer(self, obj, op)

    def __add__(self, other):
        return self._push("+", other)

    def __sub__(self, other):
        return self._push("-", other)

    def __mul__(self, other):
        return self._push("*", other)

    def __truediv__(self, other):
        return self._push("/", other)

    def __pow__(self, other):
        return self._push("**", other)

    def __pos__(self):
        # Unary plus is an identity; no node needed.
        return self

    def __neg__(self):
        # Negation is modelled as multiplication by -1.
        return self._push("*", -1)

    def __repr__(self):
        # eval() yields None while any leaf in the tree is still unbound.
        v = self.eval()
        # Fix: identity test instead of `v != None` — equality comparison
        # against None can misfire (or raise) for operand types that
        # overload __eq__; PEP 8 mandates `is not None` for this check.
        return "<Tracer: "+str(self)+" = "+str(v if v is not None else 'Undefined')+">"
|
||||
|
||||
class BaseTracer(Tracer):
    """Leaf node of the expression tree: a named, optionally-bound value."""

    def __init__(self, name, val=None):
        # *name* is used for symbolic printing; a *val* of None means the
        # leaf is still unbound, which makes the whole tree evaluate to None.
        self.name, self.val = name, val

    def eval(self):
        """Return the bound value, or None while still undefined."""
        return self.val

    def __str__(self):
        """Print the leaf as its symbolic name."""
        return self.name
|
||||
|
||||
class CompTracer(Tracer):
    """Composite node: applies a binary operator to two parent nodes."""

    def __init__(self, lParent, rParent, op):
        # Promote raw (non-Tracer) operands to leaf tracers so that the
        # tree is uniform; repr() of the constant serves as its name.
        if not isinstance(lParent, Tracer):
            lParent = BaseTracer(repr(lParent), lParent)
        if not isinstance(rParent, Tracer):
            rParent = BaseTracer(repr(rParent), rParent)
        self.parents = [lParent, rParent]
        self.op = op

    def eval(self):
        """Evaluate both parents and combine them; None if either is unbound.

        Raises ValueError on an operator string this class does not know.
        """
        l = self.parents[0].eval()
        r = self.parents[1].eval()
        # Fix: explicit identity tests instead of `None in [l, r]` —
        # membership uses ==, which can misfire (or raise) for operand
        # types that overload equality.
        if l is None or r is None:
            return None
        if self.op == "+":
            return l + r
        elif self.op == "-":
            return l - r
        elif self.op == "*":
            return l * r
        elif self.op == "/":
            return l / r
        elif self.op == "**":
            return l ** r
        # Fix: an unknown operator used to fall off the chain and silently
        # return None, indistinguishable from "leaf not bound yet".
        raise ValueError("unknown operator: " + repr(self.op))

    def __str__(self):
        # Fully parenthesised infix rendering of the subtree.
        return "("+str(self.parents[0])+" "+self.op+" "+str(self.parents[1])+")"
|
Loading…
Reference in New Issue
Block a user