Stronger linear separation
parent 147a78302f
commit 9126bbcc14

caliGraph.py (11 changed lines)
@@ -545,8 +545,8 @@ def addScoreToLabels(G):
         if 'rating' in node and node['rating'] != None:
             node['label'] += " ("+str(node['rating'])+")"
         else:
-            if 'score' in node and node['score'] != None:
-                node['label'] += " (~{:.2f}±{:.1f})".format(node['score'], node['se'] if 'se' in node else 0.0)
+            if 'score' in node and node['score'] != None and 'se' in node:
+                node['label'] += " (~{:.2f}±{:.1f})".format(node['score'], node['se'])
             else:
                 node['label'] += " (~0±∞)"
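For illustration only, a hedged sketch of what the tightened guard does (the node dict and its 'score'/'se' keys are assumed from the hunk above, not a confirmed schema): a confidence suffix is added only when both a score and a standard error are present, otherwise the label falls back to the unknown marker.

    # hypothetical node as handled by addScoreToLabels
    node = {'label': 'Example Book', 'score': 3.47, 'se': 0.8}
    if 'score' in node and node['score'] != None and 'se' in node:
        node['label'] += " (~{:.2f}±{:.1f})".format(node['score'], node['se'])
    else:
        node['label'] += " (~0±∞)"
    # node['label'] is now "Example Book (~3.47±0.8)"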
@@ -775,7 +775,7 @@ def waveFlow(G, node, n, dist, menge, firstEdge=False):
         if node in bestlist or node in keeplist:
             waveFlow(G, node, m, dist, menge, firstEdge=firstEdge)

-def evaluateFitness(books):
+def evaluateFitness(books, debugPrint=False):
     global weights
     G = buildBookGraph(books)
     graphAddAuthors(G, books)
@@ -808,8 +808,9 @@ def evaluateFitness(books):
             # reward seperation linearly
             linSepLoss.append(abs(score - mu))
     regressionLoss = sum([(1-w)**2 for w in weights.values()])
-    print(sum(errSq)/len(errSq), 0.005*regressionLoss, 0.2*boundsLoss/len(ratedBooks), 0.5*sum(linSepLoss)/len(linSepLoss))
-    return sum(errSq)/len(errSq) + 0.005*regressionLoss + 0.2*boundsLoss/len(ratedBooks) - 0.5*sum(linSepLoss)/len(linSepLoss)
+    if debugPrint:
+        print(sum(errSq)/len(errSq), 0.005*regressionLoss, 0.2*boundsLoss/len(ratedBooks), 1.0*sum(linSepLoss)/len(linSepLoss))
+    return sum(errSq)/len(errSq) + 0.005*regressionLoss + 0.2*boundsLoss/len(ratedBooks) - 1.0*sum(linSepLoss)/len(linSepLoss)

 def train(gamma = 1):
     global weights
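A minimal usage sketch under the new signature (the books list is assumed to come from whatever loader caliGraph.py normally uses): the per-term loss breakdown is printed only on request, and the linear-separation reward now enters the fitness with weight 1.0 instead of 0.5.

    # hypothetical calls; 'books' is whatever caliGraph.py normally loads
    fitness = evaluateFitness(books, debugPrint=True)  # also prints the errSq, regression, bounds and linSep terms
    fitness = evaluateFitness(books)                   # default debugPrint=False stays silent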