From 368033f538dfa2bd5020ffaba6d9bca08d1afd6d Mon Sep 17 00:00:00 2001
From: Stanislav Lisovskiy
Date: Mon, 14 Jun 2021 12:41:56 +0300
Subject: [PATCH 01/18] Initial commit
---
bayes_net.py | 124 +++++++++++++++++++++++++++++++++++++++++++++++++++
page.html | 11 +++++
restart.sh | 14 ++++++
server.py | 63 ++++++++++++++++++++++----
4 files changed, 203 insertions(+), 9 deletions(-)
create mode 100644 bayes_net.py
create mode 100644 page.html
create mode 100755 restart.sh
diff --git a/bayes_net.py b/bayes_net.py
new file mode 100644
index 00000000..85a678a6
--- /dev/null
+++ b/bayes_net.py
@@ -0,0 +1,124 @@
+import random
+from difflib import SequenceMatcher
+
+
+
+class BayesNode:
+ def __init__(self, data):
+ self.outcomes = { }
+ self.total = 0
+ self.ranges = [ ]
+ self.data = data
+
+ def learn_outcome(self, node):
+ if node not in self.outcomes:
+ self.outcomes[node] = 0
+ self.outcomes[node] += 1
+ self.total += 1
+ self._regenerate_ranges()
+ print self.outcomes, self.total
+ print self.ranges
+
+ def predict_outcome(self):
+ i = random.randint(0, self.total)
+ for r in self.ranges:
+ if i >= r[0] and i <= r[1]:
+ return r[2]
+ return None
+
+ def _regenerate_ranges(self):
+ r = 0
+ ranges = [ ]
+ for n in self.outcomes:
+ ranges.append(( r, r + self.outcomes[n], n))
+ r += self.outcomes[n]
+ self.ranges = ranges
+
+class ObjectStringAssociator:
+ def __init__(self):
+ self.objects = [ ]
+ def register_object(self, obj):
+ if obj not in self.objects:
+ self.objects.append(obj)
+ def find_closest(self, obj):
+ max_ratio = 0
+ max_obj = None
+ for o in self.objects:
+ r = SequenceMatcher(None, str(o), str(obj)).ratio()
+ if max_ratio < r:
+ max_ratio = r
+ max_obj = o
+ return max_obj
+
+class ObjectOrthogonalAssociator:
+ def __init__(self):
+ self.total_occurences = 0
+ self.attribute_registrator = [ ]
+ self.objects = [ ]
+ def register_object(self, obj):
+ ind = 0
+ for i in obj:
+ if ind >= len(self.attribute_registrator):
+ self.attribute_registrator.append([ ])
+ if i not in self.attribute_registrator[ind]:
+ self.attribute_registrator[ind].append(i)
+ ind += 1
+ if obj not in self.objects:
+ self.objects.append(obj)
+ self.total_occurences += 1
+ print self.attribute_registrator
+ def _calc_attr_match_metric(self, o1, o2, ind):
+ if type(o1) == str:
+ dist = float(abs(ord(o1[ind]) - ord(o2[ind])))
+ else:
+ dist = float(abs(int(o1[ind]) - int(o2[ind])))
+ weight = self.total_occurences / len(self.attribute_registrator[ind])
+ return dist / weight
+ def find_closest(self, obj):
+ min_obj = self.objects[0]
+ min_weight = int(4000000)
+ for o in self.objects:
+ w = 0.0
+ for ind in xrange(0, min(len(o), len(obj))):
+ w += self._calc_attr_match_metric(o, obj, ind)
+ if w < min_weight:
+ min_obj = o
+ min_weight = w
+ return min_obj
+
+class BayesNetwork:
+ def __init__(self, t):
+ self.hash_to_nodes = { }
+ self.nodes = { }
+ self.associator = t()
+ def learn_outcomes(self, objects):
+ for o in objects:
+ if hash(o) not in self.hash_to_nodes:
+ self.hash_to_nodes[hash(o)] = BayesNode(o)
+ self.associator.register_object(o)
+ nodes = [ self.hash_to_nodes[hash(o)] for o in objects ]
+ for i in xrange(0, len(nodes) - 1):
+ nodes[i].learn_outcome(nodes[i+1])
+ def predict_outcome(self, _o, steps):
+ objects = [ ]
+ if hash(_o) not in self.hash_to_nodes:
+ o = self.associator.find_closest(_o)
+ else:
+ o = _o
+ node = self.hash_to_nodes[hash(o)]
+ for i in xrange(0, steps):
+ node = node.predict_outcome()
+ if node == None:
+ return objects
+ objects.append(node.data)
+ return objects
+
+def test():
+ net = BayesNetwork(ObjectStringAssociator)
+
+ net.learn_outcomes([ "one", "two", "three" ])
+ net.learn_outcomes([ "four", "two", "four" ])
+ return net.predict_outcome("tone", 4)
+
+
+print test()
diff --git a/page.html b/page.html
new file mode 100644
index 00000000..3f841837
--- /dev/null
+++ b/page.html
@@ -0,0 +1,11 @@
+
+
+
+
+
diff --git a/restart.sh b/restart.sh
new file mode 100755
index 00000000..22c9a786
--- /dev/null
+++ b/restart.sh
@@ -0,0 +1,14 @@
+#!/bin/bash
+
+if [ -f pid_file ]
+then
+pid=`cat pid_file`
+kill $pid
+echo "kill $pid"
+fi
+
+python server.py &
+
+pid="$!"
+
+echo $pid > pid_file
\ No newline at end of file
diff --git a/server.py b/server.py
index 430d1dc7..dedffd7c 100644
--- a/server.py
+++ b/server.py
@@ -1,3 +1,7 @@
+#!/usr/bin/python
+from BaseHTTPServer import BaseHTTPRequestHandler,HTTPServer
+from bayes_net import BayesNetwork, ObjectStringAssociator
+from cgi import parse_qs
import os
import http.server
import socketserver
@@ -5,15 +9,56 @@
from http import HTTPStatus
+PORT_NUMBER = 8080
+
+#This class will handles any incoming request from
+#the browser
+net = BayesNetwork(ObjectStringAssociator)
+
class Handler(http.server.SimpleHTTPRequestHandler):
- def do_GET(self):
- self.send_response(HTTPStatus.OK)
- self.end_headers()
- msg = 'Hello! you requested %s' % (self.path)
- self.wfile.write(msg.encode())
+
+ #Handler for the GET requests
+ def do_GET(self):
+ self.send_response(200)
+ self.send_header('Content-type','text/html')
+ self.end_headers()
+ # Send the html message
+ #print self.path
+ s = self.path.replace("%20", " ")
+ #print s
+ s = s.replace("/", "")
+ s = s.replace("?", "")
+ #print s
+ s = s.replace("'", "")
+ #print s
+ s = parse_qs(s)
+ print str(s)
+ if 'outcomes' in s:
+ outcomes = s['outcomes'][0].split(',')
+ s['outcomes'] = [ i.replace(" ", "") for i in outcomes ]
+ print str(s)
+ if 'submit' in s:
+ print "Learning outcomes: ", s['outcomes']
+ #self.wfile.write(str(s))
+ net.learn_outcomes(s['outcomes'])
+ if 'predict' in s:
+ print "Predicting outcomes: ", s['outcomes'], s['steps']
+ o = net.predict_outcome(s['outcomes'][-1], int(s['steps'][0]))
+ print o
+ self.wfile.write(str(o))
+ with open ("page.html", "r") as myfile:
+ data=myfile.read()
+ self.wfile.write(data)
+ return
+try:
+ #Create a web server and define the handler to manage the
+ #incoming request
+ port = int(os.getenv('PORT', 80))
+ print('Listening on port %s' % (port))
+ httpd = socketserver.TCPServer(('', port), Handler)
+ httpd.serve_forever()
-port = int(os.getenv('PORT', 80))
-print('Listening on port %s' % (port))
-httpd = socketserver.TCPServer(('', port), Handler)
-httpd.serve_forever()
+except KeyboardInterrupt:
+ print '^C received, shutting down the web server'
+ server.socket.close()
From c8845e409563f2523aca042c122265a7dbfa6463 Mon Sep 17 00:00:00 2001
From: Stanislav Lisovskiy
Date: Mon, 14 Jun 2021 13:07:29 +0300
Subject: [PATCH 02/18] Fix indentation
---
server.py | 20 ++++++++++----------
1 file changed, 10 insertions(+), 10 deletions(-)
diff --git a/server.py b/server.py
index dedffd7c..af72724c 100644
--- a/server.py
+++ b/server.py
@@ -23,16 +23,16 @@ def do_GET(self):
self.send_header('Content-type','text/html')
self.end_headers()
# Send the html message
- #print self.path
- s = self.path.replace("%20", " ")
- #print s
- s = s.replace("/", "")
- s = s.replace("?", "")
- #print s
- s = s.replace("'", "")
- #print s
- s = parse_qs(s)
- print str(s)
+ #print self.path
+ s = self.path.replace("%20", " ")
+ #print s
+ s = s.replace("/", "")
+ s = s.replace("?", "")
+ #print s
+ s = s.replace("'", "")
+ #print s
+ s = parse_qs(s)
+ print str(s)
if 'outcomes' in s:
outcomes = s['outcomes'][0].split(',')
s['outcomes'] = [ i.replace(" ", "") for i in outcomes ]
From 9870dd43c8c75acb9426767363a2acd307b30af2 Mon Sep 17 00:00:00 2001
From: Stanislav Lisovskiy
Date: Mon, 14 Jun 2021 14:19:38 +0300
Subject: [PATCH 03/18] Fixed trivial things to port to Python3
---
bayes_net.py | 12 ++++++------
server.py | 20 ++++++++++----------
2 files changed, 16 insertions(+), 16 deletions(-)
diff --git a/bayes_net.py b/bayes_net.py
index 85a678a6..85c22013 100644
--- a/bayes_net.py
+++ b/bayes_net.py
@@ -16,8 +16,8 @@ def learn_outcome(self, node):
self.outcomes[node] += 1
self.total += 1
self._regenerate_ranges()
- print self.outcomes, self.total
- print self.ranges
+ print (self.outcomes, self.total)
+ print (self.ranges)
def predict_outcome(self):
i = random.randint(0, self.total)
@@ -66,7 +66,7 @@ def register_object(self, obj):
if obj not in self.objects:
self.objects.append(obj)
self.total_occurences += 1
- print self.attribute_registrator
+ print (self.attribute_registrator)
def _calc_attr_match_metric(self, o1, o2, ind):
if type(o1) == str:
dist = float(abs(ord(o1[ind]) - ord(o2[ind])))
@@ -97,7 +97,7 @@ def learn_outcomes(self, objects):
self.hash_to_nodes[hash(o)] = BayesNode(o)
self.associator.register_object(o)
nodes = [ self.hash_to_nodes[hash(o)] for o in objects ]
- for i in xrange(0, len(nodes) - 1):
+ for i in range(0, len(nodes) - 1):
nodes[i].learn_outcome(nodes[i+1])
def predict_outcome(self, _o, steps):
objects = [ ]
@@ -106,7 +106,7 @@ def predict_outcome(self, _o, steps):
else:
o = _o
node = self.hash_to_nodes[hash(o)]
- for i in xrange(0, steps):
+ for i in range(0, steps):
node = node.predict_outcome()
if node == None:
return objects
@@ -121,4 +121,4 @@ def test():
return net.predict_outcome("tone", 4)
-print test()
+print (test())
diff --git a/server.py b/server.py
index af72724c..ad84defd 100644
--- a/server.py
+++ b/server.py
@@ -1,5 +1,5 @@
#!/usr/bin/python
-from BaseHTTPServer import BaseHTTPRequestHandler,HTTPServer
+#from BaseHTTPServer import BaseHTTPRequestHandler,HTTPServer
from bayes_net import BayesNetwork, ObjectStringAssociator
from cgi import parse_qs
import os
@@ -32,23 +32,23 @@ def do_GET(self):
s = s.replace("'", "")
#print s
s = parse_qs(s)
- print str(s)
+ print (str(s))
if 'outcomes' in s:
outcomes = s['outcomes'][0].split(',')
s['outcomes'] = [ i.replace(" ", "") for i in outcomes ]
- print str(s)
- if 'submit' in s:
- print "Learning outcomes: ", s['outcomes']
+ print (str(s))
+ if 'submit' in s:
+ print ("Learning outcomes: ", s['outcomes'])
#self.wfile.write(str(s))
net.learn_outcomes(s['outcomes'])
if 'predict' in s:
- print "Predicting outcomes: ", s['outcomes'], s['steps']
+ print ("Predicting outcomes: ", s['outcomes'], s['steps'])
o = net.predict_outcome(s['outcomes'][-1], int(s['steps'][0]))
- print o
+ print (o)
self.wfile.write(str(o))
- with open ("page.html", "r") as myfile:
+ with open ("page.html", "r") as myfile:
data=myfile.read()
- self.wfile.write(data)
+ self.wfile.write(data)
return
try:
@@ -60,5 +60,5 @@ def do_GET(self):
httpd.serve_forever()
except KeyboardInterrupt:
- print '^C received, shutting down the web server'
+ print ('^C received, shutting down the web server')
server.socket.close()
From 1ba3f3382f5fffbd92287c8e8d3c88f0773f85fa Mon Sep 17 00:00:00 2001
From: Stanislav Lisovskiy
Date: Tue, 15 Jun 2021 14:42:51 +0300
Subject: [PATCH 04/18] Get parse_qs from urllib.parse, because of Python 3.7
---
server.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/server.py b/server.py
index ad84defd..6b2309bd 100644
--- a/server.py
+++ b/server.py
@@ -1,7 +1,7 @@
#!/usr/bin/python
#from BaseHTTPServer import BaseHTTPRequestHandler,HTTPServer
from bayes_net import BayesNetwork, ObjectStringAssociator
-from cgi import parse_qs
+from urllib.parse import parse_qs
import os
import http.server
import socketserver
From cfd10db28ab99f5b64c63c5a4a90a8d5feb74622 Mon Sep 17 00:00:00 2001
From: Stanislav Lisovskiy
Date: Thu, 24 Jun 2021 12:35:54 +0300
Subject: [PATCH 05/18] Encode str to bytes before sending through socket
---
server.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/server.py b/server.py
index 6b2309bd..6a6fa983 100644
--- a/server.py
+++ b/server.py
@@ -48,7 +48,7 @@ def do_GET(self):
self.wfile.write(str(o))
with open ("page.html", "r") as myfile:
data=myfile.read()
- self.wfile.write(data)
+ self.wfile.write(data.encode())
return
try:
From f3e04d4c731101a1d0b6888b88ef0069c811a857 Mon Sep 17 00:00:00 2001
From: Stanislav Lisovskiy
Date: Sun, 18 Jul 2021 13:52:10 +0300
Subject: [PATCH 06/18] Encode as bytes
---
server.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/server.py b/server.py
index ad84defd..72149059 100644
--- a/server.py
+++ b/server.py
@@ -48,7 +48,7 @@ def do_GET(self):
self.wfile.write(str(o))
with open ("page.html", "r") as myfile:
data=myfile.read()
- self.wfile.write(data)
+ self.wfile.write(data.encode())
return
try:
From b3ad8871cbfdcceaf6d3cdebdc7f31f8981a704a Mon Sep 17 00:00:00 2001
From: Stanislav Lisovskiy
Date: Thu, 26 Aug 2021 14:53:36 +0300
Subject: [PATCH 07/18] Add process nodes
---
process_element.py | 107 +++++++++++++++++++++++++++++++++++++++++++++
1 file changed, 107 insertions(+)
create mode 100644 process_element.py
diff --git a/process_element.py b/process_element.py
new file mode 100644
index 00000000..b8b2bc5c
--- /dev/null
+++ b/process_element.py
@@ -0,0 +1,107 @@
+import random
+from difflib import SequenceMatcher
+
+
+class ProcessNode:
+ def __init__(self):
+ self.in_node_list = [ ]
+ self.out_node_list = [ ]
+ self.state = None
+ def add_node(self, node, out_node = False, bidir = False, mutual = False):
+ if bidir:
+ self.in_node_list.append(node)
+ self.out_node_list.append(node)
+ else:
+ if out_node:
+ self.out_node_list.append(node)
+ else:
+ self.in_node_list.append(node)
+ if mutual:
+ node.add_node(self, out_node, False)
+ def remove_node(self, node, mutual = True):
+ if node in self.in_node_list:
+ i = self.in_node_list.index[node]
+ del self.in_node_list[i]
+ if node in self.out_node_list:
+ i = self.out_node_list.index[node]
+ del self.out_node_list[i]
+ if mutual:
+ node.remove_node(self, False)
+
+ def get_state(self):
+ return self.state
+
+ def process(self):
+ pass
+
+
+class BooleanLogicNode(ProcessNode):
+ def __init__(self, bits):
+ ProcessNode.__init__(self)
+ self.state = [ 0 for i in xrange(0, bits) ]
+
+class LogicOne(BooleanLogicNode):
+ def __init__(self, bits = 1):
+ BooleanLogicNode.__init__(self, bits)
+ self.state = [ 1 for i in xrange(0, bits) ]
+
+class LogicZero(BooleanLogicNode):
+ def __init__(self, bits = 1):
+ BooleanLogicNode.__init__(self, bits)
+
+class LogicAnd(BooleanLogicNode):
+ def __init__(self, bits):
+ BooleanLogicNode.__init__(self, bits)
+
+ def process(self):
+ res = all([ node.get_state() for node in self.in_node_list ])
+ self.state = [ int(res) for i in xrange(0, len(self.state)) ]
+
+class LogicOr(BooleanLogicNode):
+ def __init__(self, bits):
+ BooleanLogicNode.__init__(self, bits)
+
+ def process(self):
+ res = any([ node.get_state() for node in self.in_node_list ])
+ self.state = [ int(res) for i in xrange(0, len(self.state)) ]
+
+class LogicNot(BooleanLogicNode):
+ def __init__(self, bits):
+ BooleanLogicNode.__init__(self, bits)
+
+ def process(self):
+ self.state = [ int(not node.get_state()[0]) for node in self.in_node_list ]
+
+
+one = LogicOne()
+zero = LogicZero()
+
+and_el = LogicAnd(2)
+or_el = LogicOr(2)
+and_el.add_node(one)
+and_el.add_node(one)
+and_el.process()
+
+or_el.add_node(one)
+or_el.add_node(zero)
+or_el.process()
+
+not_el = LogicNot(2)
+not_el.add_node(or_el)
+not_el.add_node(and_el)
+not_el.process()
+
+not_el2 = LogicNot(2)
+not_el2.add_node(not_el)
+not_el2.add_node(zero)
+not_el2.process()
+
+print and_el.get_state()
+print or_el.get_state()
+print not_el.get_state()
+print not_el2.get_state()
+
+
+
+
+
From 6c8f84fb472342a098aa42285c4cd35398f72265 Mon Sep 17 00:00:00 2001
From: Stanislav Lisovskiy
Date: Thu, 20 Jan 2022 16:49:11 +0200
Subject: [PATCH 08/18] Added SQLite db for Bayesian network storage
---
bayes_net.py | 149 +++++++++++++++++++++++++++++++++++++++++----------
page.html | 25 +++++++--
restart.sh | 4 +-
server.py | 142 ++++++++++++++++++++++++++++++++++++------------
4 files changed, 251 insertions(+), 69 deletions(-)
diff --git a/bayes_net.py b/bayes_net.py
index 85c22013..907b231a 100644
--- a/bayes_net.py
+++ b/bayes_net.py
@@ -2,22 +2,45 @@
from difflib import SequenceMatcher
-
+# Each Bayes network node is associated with a few events,
+# each event can occur with its own probability
+# when we learn we accumulate this probability and then
+# synthesize it artificially using ranges whose ratios
+# come from the total count of all occurrences vs those of
+# each event.
class BayesNode:
def __init__(self, data):
self.outcomes = { }
- self.total = 0
+ self.total = 0.0
self.ranges = [ ]
self.data = data
def learn_outcome(self, node):
if node not in self.outcomes:
- self.outcomes[node] = 0
- self.outcomes[node] += 1
+ self.outcomes[node] = 0.0
+ self.outcomes[node] += 1.0
self.total += 1
self._regenerate_ranges()
- print (self.outcomes, self.total)
- print (self.ranges)
+ print "Node ", self.data, "learned outcome ", node.data, self.outcomes, self.total, "prob ", self.outcomes[node] / self.total
+ print self.outcomes, self.total
+ print self.ranges
+
+ def print_info(self):
+ print "Node ", self.data
+ print "======================================="
+ print "Outcomes:"
+ for node in self.outcomes:
+ print "Node", node.data, "hits", self.outcomes[node], "prob", self.outcomes[node] / self.total
+ print "======================================="
+
+ def print_info_str(self):
+ s = "Node " + str(self.data) + "
"
+ s += "=======================================
"
+ s += "Outcomes:
"
+ for node in self.outcomes:
+ s += "Node " + str(node.data) + " hits " + str(self.outcomes[node]) + " prob " + str(self.outcomes[node] / self.total) + "
"
+ s += "=======================================
"
+ return s
def predict_outcome(self):
i = random.randint(0, self.total)
@@ -29,11 +52,18 @@ def predict_outcome(self):
def _regenerate_ranges(self):
r = 0
ranges = [ ]
+
for n in self.outcomes:
ranges.append(( r, r + self.outcomes[n], n))
r += self.outcomes[n]
self.ranges = ranges
+# If we get some state which we haven't learned yet,
+# we need to do some classification to associate it
+# with something we know.
+# There can be multiple ways to do that, currently
+# those are using minimum string distance, if we consider
+# the state as a whole
class ObjectStringAssociator:
def __init__(self):
self.objects = [ ]
@@ -50,6 +80,21 @@ def find_closest(self, obj):
max_obj = o
return max_obj
+# For non-correlated events it is better to use
+# a weighted approach, as not all factors are equally
+# valuable in an informational sense.
+# The factors which tend to have more different states
+# have less weight than those which have less degrees
+# of freedom
+# i.e here we go through all the attributes we know
+# checking if attribute value has been seen already,
+# if it hasn't been seen the value is added to the
+# attribute.
+# If the attribute itself hasn't been seen - it is
+# added to the attributes list
+# Then weight of each attribute is calculated as
+# total_occurences of all attributes divided by
+# number of degrees of freedom
class ObjectOrthogonalAssociator:
def __init__(self):
self.total_occurences = 0
@@ -66,59 +111,105 @@ def register_object(self, obj):
if obj not in self.objects:
self.objects.append(obj)
self.total_occurences += 1
- print (self.attribute_registrator)
- def _calc_attr_match_metric(self, o1, o2, ind):
- if type(o1) == str:
- dist = float(abs(ord(o1[ind]) - ord(o2[ind])))
- else:
- dist = float(abs(int(o1[ind]) - int(o2[ind])))
+ print self.attribute_registrator
+ def _calc_weight(self, ind):
+ print "Attr: ", self.attribute_registrator[ind]
weight = self.total_occurences / len(self.attribute_registrator[ind])
- return dist / weight
+ return weight
def find_closest(self, obj):
- min_obj = self.objects[0]
- min_weight = int(4000000)
+ max_obj = self.objects[0]
+ max_weight = int(0)
for o in self.objects:
w = 0.0
for ind in xrange(0, min(len(o), len(obj))):
- w += self._calc_attr_match_metric(o, obj, ind)
- if w < min_weight:
- min_obj = o
- min_weight = w
- return min_obj
+ if type(o) == str:
+ r = SequenceMatcher(None, str(o), str(obj)).ratio()
+ w += self._calc_weight(ind) * r
+ else:
+ diff = float(abs(int(o[ind]) - int(obj[ind])))
+ w += self._calc_weight(ind) / diff
+ print o, "Weight: ", w
+ if w > max_weight:
+ max_obj = o
+ max_weight = w
+ return max_obj
class BayesNetwork:
def __init__(self, t):
self.hash_to_nodes = { }
- self.nodes = { }
+ self.nodes = [ ]
self.associator = t()
def learn_outcomes(self, objects):
for o in objects:
if hash(o) not in self.hash_to_nodes:
- self.hash_to_nodes[hash(o)] = BayesNode(o)
+ print o, " is not in ", self.hash_to_nodes, hash(o)
+ node = BayesNode(o)
+ self.hash_to_nodes[hash(o)] = node
+ self.nodes.append(node)
self.associator.register_object(o)
nodes = [ self.hash_to_nodes[hash(o)] for o in objects ]
- for i in range(0, len(nodes) - 1):
+ for i in xrange(0, len(nodes) - 1):
nodes[i].learn_outcome(nodes[i+1])
def predict_outcome(self, _o, steps):
objects = [ ]
+ print self.hash_to_nodes
if hash(_o) not in self.hash_to_nodes:
+
o = self.associator.find_closest(_o)
else:
o = _o
+ print "Closest is: ", o
node = self.hash_to_nodes[hash(o)]
- for i in range(0, steps):
+ print "Node is ", node, node.outcomes
+ for i in xrange(0, steps):
node = node.predict_outcome()
if node == None:
return objects
objects.append(node.data)
return objects
-def test():
+ def print_info(self):
+ for node in self.nodes:
+ node.print_info()
+
+ def print_info_str(self):
+ s = ""
+ for node in self.nodes:
+ s += node.print_info_str()
+ return s
+
+def test_orthogonal_associator():
+ net = BayesNetwork(ObjectOrthogonalAssociator)
+ net.learn_outcomes([ "Human", "Stanislav" ])
+ net.learn_outcomes([ "Human", "Jane" ])
+ net.learn_outcomes([ "Human", "Slava" ])
+ net.learn_outcomes([ "Dog", "Mahmud" ])
+ net.learn_outcomes([ "Dog", "Marianna" ])
+ net.learn_outcomes([ "Slava", "Wolfy" ])
+ net.learn_outcomes([ "Dog", "Wolfy" ])
+ print net.predict_outcome("Doglava", 2)
+
+def test_string_associator():
net = BayesNetwork(ObjectStringAssociator)
+ net.learn_outcomes([ "Human", "Stanislav" ])
+ net.learn_outcomes([ "Human", "Jane" ])
+ net.learn_outcomes([ "Human", "Slava" ])
+ net.learn_outcomes([ "Dog", "Mahmud" ])
+ net.learn_outcomes([ "Dog", "Marianna" ])
+ net.learn_outcomes([ "Slava", "Wolfy" ])
+ net.learn_outcomes([ "Dog", "Wolfy" ])
+ print net.predict_outcome("Doglava", 2)
+
+
+
+def test():
+ #net = BayesNetwork(ObjectOrthogonalAssociator)
- net.learn_outcomes([ "one", "two", "three" ])
- net.learn_outcomes([ "four", "two", "four" ])
- return net.predict_outcome("tone", 4)
+ #net.learn_outcomes([ "one", "two", "three" ])
+ #net.learn_outcomes([ "four", "two", "four" ])
+ test_orthogonal_associator()
+ test_string_associator()
+ #return net.predict_outcome("tone", 4)
-print (test())
+test()
diff --git a/page.html b/page.html
index 3f841837..6b60d142 100644
--- a/page.html
+++ b/page.html
@@ -1,11 +1,28 @@
+
diff --git a/restart.sh b/restart.sh
index 22c9a786..2e249c8d 100755
--- a/restart.sh
+++ b/restart.sh
@@ -4,10 +4,10 @@ if [ -f pid_file ]
then
pid=`cat pid_file`
kill $pid
-echo "kill $pid"
+echo "killed $pid"
fi
-python server.py &
+python server.py 2>&1
pid="$!"
diff --git a/server.py b/server.py
index fe93cda5..e3bfce2b 100644
--- a/server.py
+++ b/server.py
@@ -1,64 +1,138 @@
#!/usr/bin/python
-#from BaseHTTPServer import BaseHTTPRequestHandler,HTTPServer
-from bayes_net import BayesNetwork, ObjectStringAssociator
-from urllib.parse import parse_qs
-import os
-import http.server
-import socketserver
-from http import HTTPStatus
+import sqlite3
+import pickle
+import os
+from BaseHTTPServer import BaseHTTPRequestHandler,HTTPServer
+from bayes_net import BayesNetwork, ObjectStringAssociator
+from cgi import parse_qs
PORT_NUMBER = 8080
-#This class will handles any incoming request from
-#the browser
-net = BayesNetwork(ObjectStringAssociator)
+#This class will handle any incoming request from
+#the browser
+net = None
+
+conn = None
+ablob = None
+net_id = -1
-class Handler(http.server.SimpleHTTPRequestHandler):
+class myHandler(BaseHTTPRequestHandler):
#Handler for the GET requests
def do_GET(self):
+ global net_id
+ global net
+ global ablob
+ global conn
+
self.send_response(200)
self.send_header('Content-type','text/html')
self.end_headers()
# Send the html message
- #print self.path
- s = self.path.replace("%20", " ")
- #print s
- s = s.replace("/", "")
- s = s.replace("?", "")
- #print s
- s = s.replace("'", "")
- #print s
- s = parse_qs(s)
- print (str(s))
+ #print self.path
+ s = self.path.replace("%20", " ")
+ #print s
+ s = s.replace("/", "")
+ s = s.replace("?", "")
+ #print s
+ s = s.replace("'", "")
+ #print s
+ s = parse_qs(s)
+ print str(s)
if 'outcomes' in s:
outcomes = s['outcomes'][0].split(',')
s['outcomes'] = [ i.replace(" ", "") for i in outcomes ]
- print (str(s))
- if 'submit' in s:
- print ("Learning outcomes: ", s['outcomes'])
+ print str(s)
+ if "Net id" in s and 'load' in s:
+ last_net_id = net_id
+ net_id = int(s["Net id"][0])
+ if last_net_id != net_id:
+ ablob = load_blob_from_db("db", net_id)
+ if ablob != None:
+ print "Loaded blob"
+ net = load_net_from_blob(ablob)
+ else:
+ print "No blob found"
+ net = BayesNetwork(ObjectStringAssociator)
+ last_net_id = net_id
+ self.wfile.write(net.print_info_str())
+ if 'submit' in s:
+ print "Learning outcomes: ", s['outcomes']
#self.wfile.write(str(s))
+ if net == None:
+ net = BayesNetwork(ObjectStringAssociator)
net.learn_outcomes(s['outcomes'])
+ blob = pickle.dumps(net)
+ if ablob != None:
+ remove_blob("db", net_id)
+ insert_blob("db", blob, net_id)
if 'predict' in s:
- print ("Predicting outcomes: ", s['outcomes'], s['steps'])
+ print "Predicting outcomes: ", s['outcomes'], s['steps']
o = net.predict_outcome(s['outcomes'][-1], int(s['steps'][0]))
- print (o)
- self.wfile.write(str(o).encode())
- with open ("page.html", "r") as myfile:
+ print o
+ self.wfile.write(str(o))
+
+ with open ("page.html", "r") as myfile:
data=myfile.read()
- self.wfile.write(data.encode())
+ self.wfile.write(data)
return
+def create_or_open_db(db_file):
+ db_is_new = not os.path.exists(db_file)
+ conn = sqlite3.connect(db_file)
+ if db_is_new:
+ print 'Creating schema'
+ sql = '''create table if not exists NETS(
+ ID INTEGER,
+ BAYES_NET BLOB)'''
+ conn.execute(sql) # shortcut for conn.cursor().execute(sql)
+ else:
+ print 'Schema exists\n'
+ return conn
+
+def load_blob_from_db(db_file, net_id):
+ conn = create_or_open_db(db_file)
+ cur = conn.cursor()
+ print net_id
+ cur.execute("select * from NETS where ID=?", (net_id, ))
+ blob = cur.fetchone()
+ cur.close()
+ return blob
+
+def load_net_from_blob(blob):
+ net = pickle.loads(blob[1])
+ net.print_info()
+ return net
+
+def remove_blob(db_file, net_id):
+ conn = create_or_open_db(db_file)
+ sql = '''DELETE FROM NETS WHERE ID=?'''
+ conn.execute(sql, (net_id,))
+ conn.close()
+
+def insert_blob(db_file, blob, net_id):
+ conn = create_or_open_db(db_file)
+ sql = '''INSERT OR IGNORE INTO NETS
+ (ID, BAYES_NET)
+ VALUES(?,?);'''
+ conn.execute(sql, (net_id, sqlite3.Binary(blob)))
+ conn.commit()
+ sql = '''UPDATE NETS SET
+ BAYES_NET=? WHERE ID=0;'''
+ conn.execute(sql,[sqlite3.Binary(blob)])
+ conn.commit()
+ conn.close()
try:
#Create a web server and define the handler to manage the
#incoming request
- port = int(os.getenv('PORT', 80))
- print('Listening on port %s' % (port))
- httpd = socketserver.TCPServer(('', port), Handler)
- httpd.serve_forever()
+ server = HTTPServer(('', PORT_NUMBER), myHandler)
+ print 'Started httpserver on port ' , PORT_NUMBER
+
+ #Wait forever for incoming htto requests
+ server.serve_forever()
except KeyboardInterrupt:
- print ('^C received, shutting down the web server')
+ print '^C received, shutting down the web server'
server.socket.close()
From b220d8c92ebb112a24af71a412fa726b98fc8af3 Mon Sep 17 00:00:00 2001
From: Stanislav Lisovskiy
Date: Thu, 20 Jan 2022 16:59:17 +0200
Subject: [PATCH 09/18] Formatted code "a bit"
---
server.py | 208 ++++++++++++++++++++++++++++--------------------------
1 file changed, 108 insertions(+), 100 deletions(-)
diff --git a/server.py b/server.py
index e3bfce2b..81c967f0 100644
--- a/server.py
+++ b/server.py
@@ -4,135 +4,143 @@
import pickle
import os
-from BaseHTTPServer import BaseHTTPRequestHandler,HTTPServer
+from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
from bayes_net import BayesNetwork, ObjectStringAssociator
from cgi import parse_qs
PORT_NUMBER = 8080
-#This class will handle any incoming request from
-#the browser
+# This class will handle any incoming request from
+# the browser
net = None
conn = None
ablob = None
net_id = -1
+
class myHandler(BaseHTTPRequestHandler):
-
- #Handler for the GET requests
- def do_GET(self):
- global net_id
- global net
- global ablob
- global conn
-
- self.send_response(200)
- self.send_header('Content-type','text/html')
- self.end_headers()
- # Send the html message
- #print self.path
- s = self.path.replace("%20", " ")
- #print s
- s = s.replace("/", "")
- s = s.replace("?", "")
- #print s
- s = s.replace("'", "")
- #print s
- s = parse_qs(s)
- print str(s)
- if 'outcomes' in s:
- outcomes = s['outcomes'][0].split(',')
- s['outcomes'] = [ i.replace(" ", "") for i in outcomes ]
- print str(s)
- if "Net id" in s and 'load' in s:
- last_net_id = net_id
- net_id = int(s["Net id"][0])
- if last_net_id != net_id:
- ablob = load_blob_from_db("db", net_id)
- if ablob != None:
- print "Loaded blob"
- net = load_net_from_blob(ablob)
- else:
- print "No blob found"
- net = BayesNetwork(ObjectStringAssociator)
- last_net_id = net_id
- self.wfile.write(net.print_info_str())
- if 'submit' in s:
- print "Learning outcomes: ", s['outcomes']
- #self.wfile.write(str(s))
- if net == None:
- net = BayesNetwork(ObjectStringAssociator)
- net.learn_outcomes(s['outcomes'])
- blob = pickle.dumps(net)
- if ablob != None:
- remove_blob("db", net_id)
- insert_blob("db", blob, net_id)
- if 'predict' in s:
- print "Predicting outcomes: ", s['outcomes'], s['steps']
- o = net.predict_outcome(s['outcomes'][-1], int(s['steps'][0]))
- print o
- self.wfile.write(str(o))
-
- with open ("page.html", "r") as myfile:
- data=myfile.read()
- self.wfile.write(data)
- return
+
+ # Handler for the GET requests
+ def do_GET(self):
+ global net_id
+ global net
+ global ablob
+ global conn
+
+ self.send_response(200)
+ self.send_header("Content-type", "text/html")
+ self.end_headers()
+ # Send the html message
+ # print self.path
+ s = self.path.replace("%20", " ")
+ # print s
+ s = s.replace("/", "")
+ s = s.replace("?", "")
+ # print s
+ s = s.replace("'", "")
+ # print s
+ s = parse_qs(s)
+ print str(s)
+ if "outcomes" in s:
+ outcomes = s["outcomes"][0].split(",")
+ s["outcomes"] = [i.replace(" ", "") for i in outcomes]
+ print str(s)
+ if "Net id" in s and "load" in s:
+ last_net_id = net_id
+ net_id = int(s["Net id"][0])
+ if last_net_id != net_id:
+ ablob = load_blob_from_db("db", net_id)
+ if ablob != None:
+ print "Loaded blob"
+ net = load_net_from_blob(ablob)
+ else:
+ print "No blob found"
+ net = BayesNetwork(ObjectStringAssociator)
+ last_net_id = net_id
+ self.wfile.write(net.print_info_str())
+ if "submit" in s:
+ print "Learning outcomes: ", s["outcomes"]
+ # self.wfile.write(str(s))
+ if net == None:
+ net = BayesNetwork(ObjectStringAssociator)
+ net.learn_outcomes(s["outcomes"])
+ blob = pickle.dumps(net)
+ if ablob != None:
+ remove_blob("db", net_id)
+ insert_blob("db", blob, net_id)
+ if "predict" in s:
+ print "Predicting outcomes: ", s["outcomes"], s["steps"]
+ o = net.predict_outcome(s["outcomes"][-1], int(s["steps"][0]))
+ print o
+ self.wfile.write(str(o))
+
+ with open("page.html", "r") as myfile:
+ data = myfile.read()
+ self.wfile.write(data)
+ return
+
def create_or_open_db(db_file):
db_is_new = not os.path.exists(db_file)
conn = sqlite3.connect(db_file)
if db_is_new:
- print 'Creating schema'
- sql = '''create table if not exists NETS(
+ print "Creating schema"
+ sql = """create table if not exists NETS(
ID INTEGER,
- BAYES_NET BLOB)'''
- conn.execute(sql) # shortcut for conn.cursor().execute(sql)
+ BAYES_NET BLOB)"""
+ conn.execute(sql) # shortcut for conn.cursor().execute(sql)
else:
- print 'Schema exists\n'
+ print "Schema exists\n"
return conn
+
def load_blob_from_db(db_file, net_id):
- conn = create_or_open_db(db_file)
- cur = conn.cursor()
- print net_id
- cur.execute("select * from NETS where ID=?", (net_id, ))
- blob = cur.fetchone()
- cur.close()
- return blob
+ conn = create_or_open_db(db_file)
+ cur = conn.cursor()
+ print net_id
+ cur.execute("select * from NETS where ID=?", (net_id,))
+ blob = cur.fetchone()
+ cur.close()
+ return blob
+
def load_net_from_blob(blob):
- net = pickle.loads(blob[1])
- net.print_info()
- return net
+ net = pickle.loads(blob[1])
+ net.print_info()
+ return net
+
def remove_blob(db_file, net_id):
- conn = create_or_open_db(db_file)
- sql = '''DELETE FROM NETS WHERE ID=?'''
- conn.execute(sql, (net_id,))
- conn.close()
+ conn = create_or_open_db(db_file)
+ sql = """DELETE FROM NETS WHERE ID=?"""
+ conn.execute(sql, (net_id,))
+ conn.close()
+
def insert_blob(db_file, blob, net_id):
- conn = create_or_open_db(db_file)
- sql = '''INSERT OR IGNORE INTO NETS
+ conn = create_or_open_db(db_file)
+ sql = """INSERT OR IGNORE INTO NETS
(ID, BAYES_NET)
- VALUES(?,?);'''
- conn.execute(sql, (net_id, sqlite3.Binary(blob)))
- conn.commit()
- sql = '''UPDATE NETS SET
- BAYES_NET=? WHERE ID=0;'''
- conn.execute(sql,[sqlite3.Binary(blob)])
- conn.commit()
- conn.close()
+ VALUES(?,?);"""
+ conn.execute(sql, (net_id, sqlite3.Binary(blob)))
+ conn.commit()
+ sql = """UPDATE NETS SET
+ BAYES_NET=? WHERE ID=0;"""
+ conn.execute(sql, [sqlite3.Binary(blob)])
+ conn.commit()
+ conn.close()
+
+
try:
- #Create a web server and define the handler to manage the
- #incoming request
- server = HTTPServer(('', PORT_NUMBER), myHandler)
- print 'Started httpserver on port ' , PORT_NUMBER
-
- #Wait forever for incoming htto requests
- server.serve_forever()
+ # Create a web server and define the handler to manage the
+ # incoming request
+ server = HTTPServer(("", PORT_NUMBER), myHandler)
+ print "Started httpserver on port ", PORT_NUMBER
+
+ # Wait forever for incoming htto requests
+ server.serve_forever()
except KeyboardInterrupt:
- print '^C received, shutting down the web server'
- server.socket.close()
+ print "^C received, shutting down the web server"
+ server.socket.close()
From a56187aad59af2d54918b59ebc09baf029f5cd9a Mon Sep 17 00:00:00 2001
From: Stanislav Lisovskiy
Date: Thu, 20 Jan 2022 18:05:12 +0200
Subject: [PATCH 10/18] Switch to raw binary file writing instead of Sqlite as
DigitalOcean doesn't work properly with it.
Need to switch to some other database option in the future.
---
server.py | 55 +++++++++++++++++++++++++++++--------------------------
1 file changed, 29 insertions(+), 26 deletions(-)
diff --git a/server.py b/server.py
index 81c967f0..d151edae 100644
--- a/server.py
+++ b/server.py
@@ -1,6 +1,6 @@
#!/usr/bin/python
-import sqlite3
+#import sqlite3
import pickle
import os
@@ -50,7 +50,7 @@ def do_GET(self):
last_net_id = net_id
net_id = int(s["Net id"][0])
if last_net_id != net_id:
- ablob = load_blob_from_db("db", net_id)
+ ablob = load_blob_from_db_file("db", net_id)
if ablob != None:
print "Loaded blob"
net = load_net_from_blob(ablob)
@@ -60,6 +60,8 @@ def do_GET(self):
last_net_id = net_id
self.wfile.write(net.print_info_str())
if "submit" in s:
+ if net_id == -1:
+ net_id = int(s["Net id"][0])
print "Learning outcomes: ", s["outcomes"]
# self.wfile.write(str(s))
if net == None:
@@ -67,8 +69,8 @@ def do_GET(self):
net.learn_outcomes(s["outcomes"])
blob = pickle.dumps(net)
if ablob != None:
- remove_blob("db", net_id)
- insert_blob("db", blob, net_id)
+ remove_blob_from_file("db", net_id)
+ insert_blob_to_file("db", blob, net_id)
if "predict" in s:
print "Predicting outcomes: ", s["outcomes"], s["steps"]
o = net.predict_outcome(s["outcomes"][-1], int(s["steps"][0]))
@@ -104,33 +106,34 @@ def load_blob_from_db(db_file, net_id):
cur.close()
return blob
-
def load_net_from_blob(blob):
net = pickle.loads(blob[1])
net.print_info()
return net
-
-def remove_blob(db_file, net_id):
- conn = create_or_open_db(db_file)
- sql = """DELETE FROM NETS WHERE ID=?"""
- conn.execute(sql, (net_id,))
- conn.close()
-
-
-def insert_blob(db_file, blob, net_id):
- conn = create_or_open_db(db_file)
- sql = """INSERT OR IGNORE INTO NETS
- (ID, BAYES_NET)
- VALUES(?,?);"""
- conn.execute(sql, (net_id, sqlite3.Binary(blob)))
- conn.commit()
- sql = """UPDATE NETS SET
- BAYES_NET=? WHERE ID=0;"""
- conn.execute(sql, [sqlite3.Binary(blob)])
- conn.commit()
- conn.close()
-
+def create_or_open_db_from_file(db_file):
+ if not os.path.exists(db_file):
+ f = open(str(db_file), "w")
+ f.close()
+ return None
+
+def load_blob_from_db_file(db_file, net_id):
+ create_or_open_db_from_file(str(net_id))
+ myfile = open(str(net_id), "r")
+ data = myfile.read()
+ print data
+ myfile.close()
+ if len(data) == 0:
+ print "Empty file!"
+ return None
+ return (data, data)
+
+def remove_blob_from_file(db_file, net_id):
+ os.remove(str(net_id))
+
+def insert_blob_to_file(db_file, blob, net_id):
+ with open(str(net_id), "w") as myfile:
+ myfile.write(blob)
try:
# Create a web server and define the handler to manage the
From 7b09b3e252971f627d6c95a3c64990dd3fd84961 Mon Sep 17 00:00:00 2001
From: Stanislav Lisovskiy
Date: Thu, 20 Jan 2022 18:14:21 +0200
Subject: [PATCH 11/18] Switch to python3...
---
bayes_net.py | 40 ++++++++++++++++++++--------------------
server.py | 30 +++++++++++++++---------------
2 files changed, 35 insertions(+), 35 deletions(-)
diff --git a/bayes_net.py b/bayes_net.py
index 907b231a..ef0a7039 100644
--- a/bayes_net.py
+++ b/bayes_net.py
@@ -21,17 +21,17 @@ def learn_outcome(self, node):
self.outcomes[node] += 1.0
self.total += 1
self._regenerate_ranges()
- print "Node ", self.data, "learned outcome ", node.data, self.outcomes, self.total, "prob ", self.outcomes[node] / self.total
- print self.outcomes, self.total
- print self.ranges
+ print("Node ", self.data, "learned outcome ", node.data, self.outcomes, self.total, "prob ", self.outcomes[node] / self.total)
+ print(self.outcomes, self.total)
+ print(self.ranges)
def print_info(self):
- print "Node ", self.data
- print "======================================="
- print "Outcomes:"
+ print("Node ", self.data)
+ print("=======================================")
+ print("Outcomes:")
for node in self.outcomes:
- print "Node", node.data, "hits", self.outcomes[node], "prob", self.outcomes[node] / self.total
- print "======================================="
+ print("Node", node.data, "hits", self.outcomes[node], "prob", self.outcomes[node] / self.total)
+ print("=======================================")
def print_info_str(self):
s = "Node " + str(self.data) + "
"
@@ -111,9 +111,9 @@ def register_object(self, obj):
if obj not in self.objects:
self.objects.append(obj)
self.total_occurences += 1
- print self.attribute_registrator
+ print(self.attribute_registrator)
def _calc_weight(self, ind):
- print "Attr: ", self.attribute_registrator[ind]
+ print("Attr: ", self.attribute_registrator[ind])
weight = self.total_occurences / len(self.attribute_registrator[ind])
return weight
def find_closest(self, obj):
@@ -121,14 +121,14 @@ def find_closest(self, obj):
max_weight = int(0)
for o in self.objects:
w = 0.0
- for ind in xrange(0, min(len(o), len(obj))):
+ for ind in range(0, min(len(o), len(obj))):
if type(o) == str:
r = SequenceMatcher(None, str(o), str(obj)).ratio()
w += self._calc_weight(ind) * r
else:
diff = float(abs(int(o[ind]) - int(obj[ind])))
w += self._calc_weight(ind) / diff
- print o, "Weight: ", w
+ print(o, "Weight: ", w)
if w > max_weight:
max_obj = o
max_weight = w
@@ -142,26 +142,26 @@ def __init__(self, t):
def learn_outcomes(self, objects):
for o in objects:
if hash(o) not in self.hash_to_nodes:
- print o, " is not in ", self.hash_to_nodes, hash(o)
+ print(o, " is not in ", self.hash_to_nodes, hash(o))
node = BayesNode(o)
self.hash_to_nodes[hash(o)] = node
self.nodes.append(node)
self.associator.register_object(o)
nodes = [ self.hash_to_nodes[hash(o)] for o in objects ]
- for i in xrange(0, len(nodes) - 1):
+ for i in range(0, len(nodes) - 1):
nodes[i].learn_outcome(nodes[i+1])
def predict_outcome(self, _o, steps):
objects = [ ]
- print self.hash_to_nodes
+ print(self.hash_to_nodes)
if hash(_o) not in self.hash_to_nodes:
o = self.associator.find_closest(_o)
else:
o = _o
- print "Closest is: ", o
+ print("Closest is: ", o)
node = self.hash_to_nodes[hash(o)]
- print "Node is ", node, node.outcomes
- for i in xrange(0, steps):
+ print("Node is ", node, node.outcomes)
+ for i in range(0, steps):
node = node.predict_outcome()
if node == None:
return objects
@@ -187,7 +187,7 @@ def test_orthogonal_associator():
net.learn_outcomes([ "Dog", "Marianna" ])
net.learn_outcomes([ "Slava", "Wolfy" ])
net.learn_outcomes([ "Dog", "Wolfy" ])
- print net.predict_outcome("Doglava", 2)
+ print(net.predict_outcome("Doglava", 2))
def test_string_associator():
net = BayesNetwork(ObjectStringAssociator)
@@ -198,7 +198,7 @@ def test_string_associator():
net.learn_outcomes([ "Dog", "Marianna" ])
net.learn_outcomes([ "Slava", "Wolfy" ])
net.learn_outcomes([ "Dog", "Wolfy" ])
- print net.predict_outcome("Doglava", 2)
+ print(net.predict_outcome("Doglava", 2))
diff --git a/server.py b/server.py
index d151edae..54f8d2e8 100644
--- a/server.py
+++ b/server.py
@@ -4,7 +4,7 @@
import pickle
import os
-from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
+from http.server import BaseHTTPRequestHandler, HTTPServer
from bayes_net import BayesNetwork, ObjectStringAssociator
from cgi import parse_qs
@@ -41,28 +41,28 @@ def do_GET(self):
s = s.replace("'", "")
# print s
s = parse_qs(s)
- print str(s)
+ print(str(s))
if "outcomes" in s:
outcomes = s["outcomes"][0].split(",")
s["outcomes"] = [i.replace(" ", "") for i in outcomes]
- print str(s)
+ print(str(s))
if "Net id" in s and "load" in s:
last_net_id = net_id
net_id = int(s["Net id"][0])
if last_net_id != net_id:
ablob = load_blob_from_db_file("db", net_id)
if ablob != None:
- print "Loaded blob"
+ print("Loaded blob")
net = load_net_from_blob(ablob)
else:
- print "No blob found"
+ print("No blob found")
net = BayesNetwork(ObjectStringAssociator)
last_net_id = net_id
self.wfile.write(net.print_info_str())
if "submit" in s:
if net_id == -1:
net_id = int(s["Net id"][0])
- print "Learning outcomes: ", s["outcomes"]
+ print("Learning outcomes: ", s["outcomes"])
# self.wfile.write(str(s))
if net == None:
net = BayesNetwork(ObjectStringAssociator)
@@ -72,9 +72,9 @@ def do_GET(self):
remove_blob_from_file("db", net_id)
insert_blob_to_file("db", blob, net_id)
if "predict" in s:
- print "Predicting outcomes: ", s["outcomes"], s["steps"]
+ print("Predicting outcomes: ", s["outcomes"], s["steps"])
o = net.predict_outcome(s["outcomes"][-1], int(s["steps"][0]))
- print o
+ print(o)
self.wfile.write(str(o))
with open("page.html", "r") as myfile:
@@ -87,20 +87,20 @@ def create_or_open_db(db_file):
db_is_new = not os.path.exists(db_file)
conn = sqlite3.connect(db_file)
if db_is_new:
- print "Creating schema"
+ print("Creating schema")
sql = """create table if not exists NETS(
ID INTEGER,
BAYES_NET BLOB)"""
conn.execute(sql) # shortcut for conn.cursor().execute(sql)
else:
- print "Schema exists\n"
+ print("Schema exists\n")
return conn
def load_blob_from_db(db_file, net_id):
conn = create_or_open_db(db_file)
cur = conn.cursor()
- print net_id
+ print(net_id)
cur.execute("select * from NETS where ID=?", (net_id,))
blob = cur.fetchone()
cur.close()
@@ -121,10 +121,10 @@ def load_blob_from_db_file(db_file, net_id):
create_or_open_db_from_file(str(net_id))
myfile = open(str(net_id), "r")
data = myfile.read()
- print data
+ print(data)
myfile.close()
if len(data) == 0:
- print "Empty file!"
+ print("Empty file!")
return None
return (data, data)
@@ -139,11 +139,11 @@ def insert_blob_to_file(db_file, blob, net_id):
# Create a web server and define the handler to manage the
# incoming request
server = HTTPServer(("", PORT_NUMBER), myHandler)
- print "Started httpserver on port ", PORT_NUMBER
+ print("Started httpserver on port ", PORT_NUMBER)
# Wait forever for incoming htto requests
server.serve_forever()
except KeyboardInterrupt:
- print "^C received, shutting down the web server"
+ print("^C received, shutting down the web server")
server.socket.close()
From 478d199d6a60eef6366225a8c91787cc23535f69 Mon Sep 17 00:00:00 2001
From: Stanislav Lisovskiy
Date: Tue, 15 Jun 2021 14:42:51 +0300
Subject: [PATCH 12/18] Get parse_qs from urllib.parse, because of Python 3.7
---
server.py | 5 ++++-
1 file changed, 4 insertions(+), 1 deletion(-)
diff --git a/server.py b/server.py
index 54f8d2e8..4b232249 100644
--- a/server.py
+++ b/server.py
@@ -6,7 +6,10 @@
import os
from http.server import BaseHTTPRequestHandler, HTTPServer
from bayes_net import BayesNetwork, ObjectStringAssociator
-from cgi import parse_qs
+from urllib.parse import parse_qs
+import os
+import http.server
+import socketserver
PORT_NUMBER = 8080
From 85f626316f51281324b5a49f7d00bcebeea9d9bf Mon Sep 17 00:00:00 2001
From: Stanislav Lisovskiy
Date: Thu, 20 Jan 2022 18:22:24 +0200
Subject: [PATCH 13/18] Encode as bytes
---
server.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/server.py b/server.py
index 4b232249..2b08f9ef 100644
--- a/server.py
+++ b/server.py
@@ -82,7 +82,7 @@ def do_GET(self):
with open("page.html", "r") as myfile:
data = myfile.read()
- self.wfile.write(data)
+ self.wfile.write(data.encode())
return
From 64bb90aad561df5ecdfd29171a51030a6aa36da7 Mon Sep 17 00:00:00 2001
From: Stanislav Lisovskiy
Date: Thu, 20 Jan 2022 18:29:40 +0200
Subject: [PATCH 14/18] Write blob as bytes
---
server.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/server.py b/server.py
index 2b08f9ef..2deba2c5 100644
--- a/server.py
+++ b/server.py
@@ -135,7 +135,7 @@ def remove_blob_from_file(db_file, net_id):
os.remove(str(net_id))
def insert_blob_to_file(db_file, blob, net_id):
- with open(str(net_id), "w") as myfile:
+ with open(str(net_id), "wb") as myfile:
myfile.write(blob)
try:
From 6f59413db230c7eb2ae94d1ac0913ffb107df43c Mon Sep 17 00:00:00 2001
From: Stanislav Lisovskiy
Date: Thu, 20 Jan 2022 18:33:05 +0200
Subject: [PATCH 15/18] Change file write encoding to bytes.
---
server.py | 6 +++---
1 file changed, 3 insertions(+), 3 deletions(-)
diff --git a/server.py b/server.py
index 2deba2c5..02e96607 100644
--- a/server.py
+++ b/server.py
@@ -61,7 +61,7 @@ def do_GET(self):
print("No blob found")
net = BayesNetwork(ObjectStringAssociator)
last_net_id = net_id
- self.wfile.write(net.print_info_str())
+ self.wfile.write(net.print_info_str().encode())
if "submit" in s:
if net_id == -1:
net_id = int(s["Net id"][0])
@@ -78,7 +78,7 @@ def do_GET(self):
print("Predicting outcomes: ", s["outcomes"], s["steps"])
o = net.predict_outcome(s["outcomes"][-1], int(s["steps"][0]))
print(o)
- self.wfile.write(str(o))
+ self.wfile.write(str(o).encode())
with open("page.html", "r") as myfile:
data = myfile.read()
@@ -122,7 +122,7 @@ def create_or_open_db_from_file(db_file):
def load_blob_from_db_file(db_file, net_id):
create_or_open_db_from_file(str(net_id))
- myfile = open(str(net_id), "r")
+ myfile = open(str(net_id), "rb")
data = myfile.read()
print(data)
myfile.close()
From 2b6e8f2ddb7a50ec669bf4ec9c57edd0e4914689 Mon Sep 17 00:00:00 2001
From: Stanislav Lisovskiy
Date: Mon, 24 Jan 2022 16:52:20 +0200
Subject: [PATCH 16/18] Add possibility to upload input data as a file.
---
page.html | 23 ++++++++++++++++++++---
server.py | 1 -
2 files changed, 20 insertions(+), 4 deletions(-)
diff --git a/page.html b/page.html
index 6b60d142..17e58e22 100644
--- a/page.html
+++ b/page.html
@@ -17,12 +17,29 @@
Network id to manipulate
Example: one, two, three
-Number of forecast steps
-
+
Click on the "Choose File" button to upload a file. Events should be comma separated.
+
Load Bayes network
-Learn
+Learn
+
Predict
+Number of forecast steps
+
+
+