From 08d4dde36e8d6c2c02d8ae4ad5b135e91dae7c8d Mon Sep 17 00:00:00 2001 From: Vanessa Yaremchuk Date: Fri, 30 Mar 2012 17:36:21 -0400 Subject: [PATCH 1/5] Added a command line option for recurrent networks. --- learn_gest.py | 546 ++++++++++++++++++++++++++------------------------ 1 file changed, 285 insertions(+), 261 deletions(-) diff --git a/learn_gest.py b/learn_gest.py index 4446237..ea75c31 100644 --- a/learn_gest.py +++ b/learn_gest.py @@ -1,261 +1,285 @@ -from pybrain.supervised.trainers import BackpropTrainer -from pybrain.tools.shortcuts import buildNetwork -from pybrain.datasets import SupervisedDataSet -import mapper -import Tkinter - -import tkFileDialog -import sys -#./mapperRec.exe -m tkgui -b file -f tkgui.txt - -if (len(sys.argv)==4): - #print (sys.argv) - try: - num_inputs=int(sys.argv[1]) - num_hidden=int(sys.argv[2]) - num_outputs=int(sys.argv[3]) - print ("Input Arguments (#inputs, #hidden nodes, #outputs): " + str(num_inputs) + ", " + str(num_hidden) + ", " + str(num_outputs) ) - except: - print ("Bad Input Arguments (#inputs, #hidden nodes, #outputs)") - sys.exit(1) -elif (len(sys.argv)>1): - print ("Bad Input Arguments (#inputs, #hidden nodes, #outputs)") - sys.exit(1) - -else: - #number of network inputs - num_inputs=8 - #number of network outputs - num_outputs=8 - #number of hidden nodes - num_hidden=5 - print ("No Input Arguments (#inputs, #hidden nodes, #outputs), defaulting to: " + str(num_inputs) + ", " + str(num_hidden) + ", " + str(num_outputs) ) -#instatiate mapper -l_map=mapper.device("learn_mapper",9000) - -l_inputs={} -l_outputs={} -data_input={} -data_output={} -learning = 0 -compute = 0 - -for s_index in range(num_inputs): - data_input[s_index+10]=0.0 -# data_input[s_index]=0.0 - -for s_index in range (num_outputs): - data_output[s_index]=0.0 - - -sliders={} - -master=Tkinter.Tk() -master.title("PyBrain Mapper Demo") -master.resizable(height=True, width=True) -master.geometry("500x500") - -def main_loop(): - global ds - if ((learning==1) and (compute ==0)): - print ("Inputs: ") - print (tuple(data_input.values())) - print ("Outputs: ") - print (tuple( data_output.values())) -# print ("input/output values: " + (tuple(data_input.values()),tuple(data_output.values()))) - ds.addSample(tuple(data_input.values()),tuple(data_output.values())) -# if learning==1: -# pass - - -# if compute ==1: -# pass - l_map.poll(0) - - - -def on_gui_change(x,s_index): -# s_index=0 - try: - #print "in callback: on gui change" - #print x,s_index - - global data_output - if (compute==0): - data_output[s_index]=float(x)/100.0 - l_outputs[s_index].update(float(x)/100.0) - #print ("on gui change: ", data_output) - - except: - print ("WTF MATE? 
On Gui Change Error!") - raise - -for s_index in range(num_outputs): - def tc(s_index): - return lambda x: on_gui_change(x,s_index) - - sliders[s_index]=Tkinter.Scale(master,from_=0,to=100, label='output'+str(s_index),orient=Tkinter.HORIZONTAL,length=300,command=tc(s_index)) - sliders[s_index].pack() - - - -def learn_callback(): - global learning - - if learning == 1: - b_learn.config(relief='raised',text="Acquire Training Data (OFF)",bg='gray') - learning=0 - - print ("learning is now OFF") - elif learning ==0: - b_learn.config(relief='sunken',text="Acquiring Training Data (ON)",bg='red') - learning=1 - print ("learning is now ON") - - - print ("learning is", learning) - #b.learn_on.text="Acquire Training Data (ON)" - -def compute_callback(): - global compute - global net - global ds - if compute==1: - b_compute.config(relief='raised',text="Press to compute network outputs (OFF)",bg='gray') - compute =0 - print ("Compute network output is now OFF!") - elif compute ==0: - - #trainer.trainUntilConvergence() - b_compute.config(relief='sunken',text="Computing network outputs(ON)",bg='coral') - compute =1 - print ("Comput network output is now ON!") - #print(dir(ds)) - #print(ds['target'][0]) - #print(ds['target'][1]) - #print(ds[1,0]) - #print(ds[1,1]) - -def train_callback(): - trainer = BackpropTrainer(net, ds) - for train_round in range (40): - print(trainer.train()) - print (trainer) - - -def clear_dataset(): - ds.clear() -def save_dataset(): - save_filename = tkFileDialog.asksaveasfilename() - ds.saveToFile(save_filename) - csv_file=open(save_filename+".csv",'w') - csv_file.write("[inputs][outputs]\r\n") - for inpt, tgt in ds: - new_str=str("{" + repr(inpt) + "," + repr(tgt) + "}") -# (repr(inpt) + repr(tgt)) - new_str=new_str.strip('\n') - new_str=new_str.strip('\r') - new_str=new_str+"\r" - print(repr(new_str)) - csv_file.write(new_str) - csv_file.close() -def load_dataset(): - open_filename = tkFileDialog.askopenfilename() - global ds - ds=SupervisedDataSet.loadFromFile(open_filename) - -def save_net(): - from pybrain.tools.customxml import networkwriter - save_filename = tkFileDialog.asksaveasfilename() - networkwriter.NetworkWriter.writeToFile(net,save_filename) -def load_net(): - from pybrain.tools.customxml import networkreader - open_filename = tkFileDialog.askopenfilename() - global net - net=networkreader.NetworkReader.readFrom(open_filename) - -b_learn = Tkinter.Button(master, text="Acquire Training Data (OFF)", command=learn_callback) -b_learn.pack() -b_train =Tkinter.Button(master, text="Train Network", command=train_callback) -b_train.pack() -b_compute = Tkinter.Button(master, text="Compute Network Outputs", command=compute_callback) -b_compute.pack() - -b_clear_data=Tkinter.Button(master, text="Clear data set",command = clear_dataset) -b_clear_data.pack() -b_save_dataset=Tkinter.Button(master, text='Save Current DataSet to file',command=save_dataset) -b_save_dataset.pack() -b_load_dataset=Tkinter.Button(master, text='Load DataSet from File',command=load_dataset) -b_load_dataset.pack() -b_save_net=Tkinter.Button(master, text='Save Current Network to File',command=save_net) -b_save_net.pack() -b_load_net=Tkinter.Button(master, text='Load Network from File',command=load_net) -b_load_net.pack() - - -def ontimer(): - #print 'someshit' - main_loop() - # check the serial port - master.after(10, ontimer) - - - - -#mapper signal handler (updates data_input[sig_indx]=new_float_value) -def h(sig, f): - try: - #print "mapper signal handler" - #print (sig.name, f) - - 
s_indx=str.split(sig.name,"/input/") -# print sig.name - global data_input - global data_output - data_input[int(s_indx[1])]=float(f/100.0) -# print(int(s_indx[1]),data_input[int(s_indx[1])]) - -# if (learning==1): -# print(int(s_indx[1]),data_input[int(s_indx[1])]) - if ((compute==1) and (learning==0)): - #print ("inputs to net: ",data_input) - activated_out=net.activate(tuple(data_input.values())) - #print ("Activated outs: ", activated_out) - for out_index in range(num_outputs): - data_output[out_index]=activated_out[out_index] - sliders[out_index].set(int(activated_out[out_index]*100.0)) - l_outputs[out_index].update(data_output[out_index]) - except: - print "WTF, h handler not working" - -#create mapper signals (inputs) -for l_num in range(num_inputs): - l_inputs[l_num]=l_map.add_input("/input/"+str(l_num+int(10)),'f',h,None,0,100.0) -# l_inputs[l_num]=l_map.add_input("/input/"+str(l_num),'f',h,None,0,100.0) - l_map.poll(0) - print ("creating input", "/input/"+str(l_num+int(10))) -# print ("creating input", "/input/"+str(l_num)) - -#create mapper signals (outputs) -for l_num in range(num_outputs): -# l_outputs[l_num]=l_map.add_output("/output/"+str(l_num+int(10)),'f',None,0,1) - l_outputs[l_num]=l_map.add_output("/output/"+str(l_num),'f',None,0,1) - l_map.poll(0) -# print ("creating output","/output/"+str(l_num+int(10))) - print ("creating output","/output/"+str(l_num)) - -#create network -net = buildNetwork(num_inputs,num_hidden,num_outputs,bias=True) -#create dataSet -ds = SupervisedDataSet(num_inputs, num_outputs) - -#while (True): - - - -ontimer() - #master.after(500, ontimer) -master.protocol("WM_DELETE_WINDOW", master.quit) -master.mainloop() -master.destroy() -del master +from pybrain.supervised.trainers import BackpropTrainer +from pybrain.tools.shortcuts import buildNetwork +from pybrain.datasets import SupervisedDataSet +from pybrain.structure.modules import SigmoidLayer +import mapper +import Tkinter + +import tkFileDialog +import sys +#./mapperRec.exe -m tkgui -b file -f tkgui.txt + +recurrent_flag=False; # default case is a nonrecurrent feedforward network + +if (len(sys.argv)==4): + #print (sys.argv) + try: + num_inputs=int(sys.argv[1]) + num_hidden=int(sys.argv[2]) + num_outputs=int(sys.argv[3]) + print ("Input Arguments (#inputs, #hidden nodes, #outputs): " + str(num_inputs) + ", " + str(num_hidden) + ", " + str(num_outputs) ) + except: + print ("Bad Input Arguments (#inputs, #hidden nodes, #outputs)") + sys.exit(1) +elif (len(sys.argv)==5): + try: + num_inputs=int(sys.argv[1]) + num_hidden=int(sys.argv[2]) + num_outputs=int(sys.argv[3]) + if (sys.argv[4] == "R"): + recurrent_flag=True + elif (sys.argv[4] == "F"): + recurrent_flag=False + print ("Input Arguments (#inputs, #hidden nodes, #outputs): " + str(num_inputs) + ", " + str(num_hidden) + ", " + str(num_outputs) + ", recurrent = " + str(recurrent_flag)) + except: + print ("Bad Input Arguments (#inputs, #hidden nodes, #outputs, R/F == Recurrent/Feedforward)") + sys.exit(1) +elif (len(sys.argv)>1): + print ("Bad Input Arguments (#inputs, #hidden nodes, #outputs)") + sys.exit(1) + +else: + #number of network inputs + num_inputs=8 + #number of network outputs + num_outputs=8 + #number of hidden nodes + num_hidden=5 + print ("No Input Arguments (#inputs, #hidden nodes, #outputs), defaulting to: " + str(num_inputs) + ", " + str(num_hidden) + ", " + str(num_outputs) ) +#instatiate mapper +l_map=mapper.device("learn_mapper",9000) + +l_inputs={} +l_outputs={} +data_input={} +data_output={} +learning = 0 +compute = 0 + +for 
s_index in range(num_inputs): + data_input[s_index+10]=0.0 +# data_input[s_index]=0.0 + +for s_index in range (num_outputs): + data_output[s_index]=0.0 + + +sliders={} + +master=Tkinter.Tk() +master.title("PyBrain Mapper Demo") +master.resizable(height=True, width=True) +master.geometry("500x500") + +def main_loop(): + global ds + if ((learning==1) and (compute ==0)): + print ("Inputs: ") + print (tuple(data_input.values())) + print ("Outputs: ") + print (tuple( data_output.values())) +# print ("input/output values: " + (tuple(data_input.values()),tuple(data_output.values()))) + ds.addSample(tuple(data_input.values()),tuple(data_output.values())) +# if learning==1: +# pass + + +# if compute ==1: +# pass + l_map.poll(0) + + + +def on_gui_change(x,s_index): +# s_index=0 + try: + #print "in callback: on gui change" + #print x,s_index + + global data_output + if (compute==0): + data_output[s_index]=float(x)/100.0 + l_outputs[s_index].update(float(x)/100.0) + #print ("on gui change: ", data_output) + + except: + print ("WTF MATE? On Gui Change Error!") + raise + +for s_index in range(num_outputs): + def tc(s_index): + return lambda x: on_gui_change(x,s_index) + + sliders[s_index]=Tkinter.Scale(master,from_=0,to=100, label='output'+str(s_index),orient=Tkinter.HORIZONTAL,length=300,command=tc(s_index)) + sliders[s_index].pack() + + + +def learn_callback(): + global learning + + if learning == 1: + b_learn.config(relief='raised',text="Acquire Training Data (OFF)",bg='gray') + learning=0 + + print ("learning is now OFF") + elif learning ==0: + b_learn.config(relief='sunken',text="Acquiring Training Data (ON)",bg='red') + learning=1 + print ("learning is now ON") + + + print ("learning is", learning) + #b.learn_on.text="Acquire Training Data (ON)" + +def compute_callback(): + global compute + global net + global ds + if compute==1: + b_compute.config(relief='raised',text="Press to compute network outputs (OFF)",bg='gray') + compute =0 + print ("Compute network output is now OFF!") + elif compute ==0: + + #trainer.trainUntilConvergence() + b_compute.config(relief='sunken',text="Computing network outputs(ON)",bg='coral') + compute =1 + print ("Comput network output is now ON!") + #print(dir(ds)) + #print(ds['target'][0]) + #print(ds['target'][1]) + #print(ds[1,0]) + #print(ds[1,1]) + +def train_callback(): + trainer = BackpropTrainer(net, learningrate=0.01, lrdecay=1, momentum=0.0, verbose=True) + print 'MSE before', trainer.testOnData(ds, verbose=True) + trainer.trainUntilConvergence(ds, 2000) + print 'MSE after', trainer.testOnData(ds, verbose=True) + + +def clear_dataset(): + ds.clear() + +def clear_network(): + net.reset() + +def save_dataset(): + save_filename = tkFileDialog.asksaveasfilename() + ds.saveToFile(save_filename) + csv_file=open(save_filename+".csv",'w') + csv_file.write("[inputs][outputs]\r\n") + for inpt, tgt in ds: + new_str=str("{" + repr(inpt) + "," + repr(tgt) + "}") +# (repr(inpt) + repr(tgt)) + new_str=new_str.strip('\n') + new_str=new_str.strip('\r') + new_str=new_str+"\r" + print(repr(new_str)) + csv_file.write(new_str) + csv_file.close() +def load_dataset(): + open_filename = tkFileDialog.askopenfilename() + global ds + ds=SupervisedDataSet.loadFromFile(open_filename) + print ds + +def save_net(): + from pybrain.tools.customxml import networkwriter + save_filename = tkFileDialog.asksaveasfilename() + networkwriter.NetworkWriter.writeToFile(net,save_filename) +def load_net(): + from pybrain.tools.customxml import networkreader + open_filename = tkFileDialog.askopenfilename() 
+ global net + net=networkreader.NetworkReader.readFrom(open_filename) + +b_learn = Tkinter.Button(master, text="Acquire Training Data (OFF)", command=learn_callback) +b_learn.pack() +b_train =Tkinter.Button(master, text="Train Network", command=train_callback) +b_train.pack() +b_compute = Tkinter.Button(master, text="Compute Network Outputs", command=compute_callback) +b_compute.pack() + +b_clear_data=Tkinter.Button(master, text="Clear data set",command = clear_dataset) +b_clear_data.pack() +b_clear_net=Tkinter.Button(master, text="Reset Network",command = clear_network) +b_clear_net.pack() +b_save_dataset=Tkinter.Button(master, text='Save Current DataSet to file',command=save_dataset) +b_save_dataset.pack() +b_load_dataset=Tkinter.Button(master, text='Load DataSet from File',command=load_dataset) +b_load_dataset.pack() +b_save_net=Tkinter.Button(master, text='Save Current Network to File',command=save_net) +b_save_net.pack() +b_load_net=Tkinter.Button(master, text='Load Network from File',command=load_net) +b_load_net.pack() + + +def ontimer(): + #print 'someshit' + main_loop() + # check the serial port + master.after(10, ontimer) + + + + +#mapper signal handler (updates data_input[sig_indx]=new_float_value) +def h(sig, f): + try: + #print "mapper signal handler" + #print (sig.name, f) + + s_indx=str.split(sig.name,"/input/") +# print sig.name + global data_input + global data_output + data_input[int(s_indx[1])]=float(f/100.0) +# print(int(s_indx[1]),data_input[int(s_indx[1])]) + +# if (learning==1): +# print(int(s_indx[1]),data_input[int(s_indx[1])]) + if ((compute==1) and (learning==0)): + #print ("inputs to net: ",data_input) + activated_out=net.activate(tuple(data_input.values())) + #print ("Activated outs: ", activated_out) + for out_index in range(num_outputs): + data_output[out_index]=activated_out[out_index] + sliders[out_index].set(int(activated_out[out_index]*100.0)) + l_outputs[out_index].update(data_output[out_index]) + except: + print "WTF, h handler not working" + +#create mapper signals (inputs) +for l_num in range(num_inputs): + l_inputs[l_num]=l_map.add_input("/input/"+str(l_num+int(10)),'f',h,None,0,100.0) +# l_inputs[l_num]=l_map.add_input("/input/"+str(l_num),'f',h,None,0,100.0) + l_map.poll(0) + print ("creating input", "/input/"+str(l_num+int(10))) +# print ("creating input", "/input/"+str(l_num)) + +#create mapper signals (outputs) +for l_num in range(num_outputs): +# l_outputs[l_num]=l_map.add_output("/output/"+str(l_num+int(10)),'f',None,0,1) + l_outputs[l_num]=l_map.add_output("/output/"+str(l_num),'f',None,0,1) + l_map.poll(0) +# print ("creating output","/output/"+str(l_num+int(10))) + print ("creating output","/output/"+str(l_num)) + +#create network +net = buildNetwork(num_inputs,num_hidden,num_outputs,bias=True, hiddenclass=SigmoidLayer, outclass=SigmoidLayer, recurrent=recurrent_flag) +#net = buildNetwork(num_inputs,num_hidden,num_outputs,bias=True) +#create dataSet +ds = SupervisedDataSet(num_inputs, num_outputs) + +#while (True): + + + +ontimer() + #master.after(500, ontimer) +master.protocol("WM_DELETE_WINDOW", master.quit) +master.mainloop() +master.destroy() +del master From 3b4d14ede526ee1b86201e5452063c5aeb9a1b7d Mon Sep 17 00:00:00 2001 From: Vanessa Yaremchuk Date: Wed, 16 May 2012 21:07:15 -0400 Subject: [PATCH 2/5] LGPL added --- LICENSE | 61 +++++++++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 61 insertions(+) create mode 100644 LICENSE diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..e78965a --- /dev/null 
+++ b/LICENSE @@ -0,0 +1,61 @@ +;; This buffer is for notes you don't want to save, and for Lisp evaluation. +;; If you want to create a file, visit that file with C-x C-f, +;; then enter the text in that file's own buffer. + +GNU LESSER GENERAL PUBLIC LICENSE + +Version 3, 29 June 2007 + +Copyright © 2007 Free Software Foundation, Inc. + +Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed. + +This version of the GNU Lesser General Public License incorporates the terms and conditions of version 3 of the GNU General Public License, supplemented by the additional permissions listed below. + +0. Additional Definitions. +As used herein, “this License” refers to version 3 of the GNU Lesser General Public License, and the “GNU GPL” refers to version 3 of the GNU General Public License. + +“The Library” refers to a covered work governed by this License, other than an Application or a Combined Work as defined below. + +An “Application” is any work that makes use of an interface provided by the Library, but which is not otherwise based on the Library. Defining a subclass of a class defined by the Library is deemed a mode of using an interface provided by the Library. + +A “Combined Work” is a work produced by combining or linking an Application with the Library. The particular version of the Library with which the Combined Work was made is also called the “Linked Version”. + +The “Minimal Corresponding Source” for a Combined Work means the Corresponding Source for the Combined Work, excluding any source code for portions of the Combined Work that, considered in isolation, are based on the Application, and not on the Linked Version. + +The “Corresponding Application Code” for a Combined Work means the object code and/or source code for the Application, including any data and utility programs needed for reproducing the Combined Work from the Application, but excluding the System Libraries of the Combined Work. + +1. Exception to Section 3 of the GNU GPL. +You may convey a covered work under sections 3 and 4 of this License without being bound by section 3 of the GNU GPL. + +2. Conveying Modified Versions. +If you modify a copy of the Library, and, in your modifications, a facility refers to a function or data to be supplied by an Application that uses the facility (other than as an argument passed when the facility is invoked), then you may convey a copy of the modified version: + +a) under this License, provided that you make a good faith effort to ensure that, in the event an Application does not supply the function or data, the facility still operates, and performs whatever part of its purpose remains meaningful, or +b) under the GNU GPL, with none of the additional permissions of this License applicable to that copy. +3. Object Code Incorporating Material from Library Header Files. +The object code form of an Application may incorporate material from a header file that is part of the Library. You may convey such object code under terms of your choice, provided that, if the incorporated material is not limited to numerical parameters, data structure layouts and accessors, or small macros, inline functions and templates (ten or fewer lines in length), you do both of the following: + +a) Give prominent notice with each copy of the object code that the Library is used in it and that the Library and its use are covered by this License. +b) Accompany the object code with a copy of the GNU GPL and this license document. +4. 
Combined Works. +You may convey a Combined Work under terms of your choice that, taken together, effectively do not restrict modification of the portions of the Library contained in the Combined Work and reverse engineering for debugging such modifications, if you also do each of the following: + +a) Give prominent notice with each copy of the Combined Work that the Library is used in it and that the Library and its use are covered by this License. +b) Accompany the Combined Work with a copy of the GNU GPL and this license document. +c) For a Combined Work that displays copyright notices during execution, include the copyright notice for the Library among these notices, as well as a reference directing the user to the copies of the GNU GPL and this license document. +d) Do one of the following: +0) Convey the Minimal Corresponding Source under the terms of this License, and the Corresponding Application Code in a form suitable for, and under terms that permit, the user to recombine or relink the Application with a modified version of the Linked Version to produce a modified Combined Work, in the manner specified by section 6 of the GNU GPL for conveying Corresponding Source. +1) Use a suitable shared library mechanism for linking with the Library. A suitable mechanism is one that (a) uses at run time a copy of the Library already present on the user's computer system, and (b) will operate properly with a modified version of the Library that is interface-compatible with the Linked Version. +e) Provide Installation Information, but only if you would otherwise be required to provide such information under section 6 of the GNU GPL, and only to the extent that such information is necessary to install and execute a modified version of the Combined Work produced by recombining or relinking the Application with a modified version of the Linked Version. (If you use option 4d0, the Installation Information must accompany the Minimal Corresponding Source and Corresponding Application Code. If you use option 4d1, you must provide the Installation Information in the manner specified by section 6 of the GNU GPL for conveying Corresponding Source.) +5. Combined Libraries. +You may place library facilities that are a work based on the Library side by side in a single library together with other library facilities that are not Applications and are not covered by this License, and convey such a combined library under terms of your choice, if you do both of the following: + +a) Accompany the combined library with a copy of the same work based on the Library, uncombined with any other library facilities, conveyed under the terms of this License. +b) Give prominent notice with the combined library that part of it is a work based on the Library, and explaining where to find the accompanying uncombined form of the same work. +6. Revised Versions of the GNU Lesser General Public License. +The Free Software Foundation may publish revised and/or new versions of the GNU Lesser General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns. + +Each version is given a distinguishing version number. If the Library as you received it specifies that a certain numbered version of the GNU Lesser General Public License “or any later version” applies to it, you have the option of following the terms and conditions either of that published version or of any later version published by the Free Software Foundation. 
If the Library as you received it does not specify a version number of the GNU Lesser General Public License, you may choose any version of the GNU Lesser General Public License ever published by the Free Software Foundation. + +If the Library as you received it specifies that a proxy can decide whether future versions of the GNU Lesser General Public License shall apply, that proxy's public statement of acceptance of any version is permanent authorization for you to choose that version for the Library. \ No newline at end of file From 4fc2d23af4bb79f0a10aa14191db4479dc1036da Mon Sep 17 00:00:00 2001 From: Yaremchuk Date: Wed, 16 May 2012 22:40:46 -0300 Subject: [PATCH 3/5] Update LICENSE --- LICENSE | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/LICENSE b/LICENSE index e78965a..35ae22f 100644 --- a/LICENSE +++ b/LICENSE @@ -1,6 +1,4 @@ -;; This buffer is for notes you don't want to save, and for Lisp evaluation. -;; If you want to create a file, visit that file with C-x C-f, -;; then enter the text in that file's own buffer. + GNU LESSER GENERAL PUBLIC LICENSE From 9e97750dec3817830f892a9f959b7457f127d7d7 Mon Sep 17 00:00:00 2001 From: Yaremchuk Date: Thu, 19 Jul 2012 12:35:46 -0300 Subject: [PATCH 4/5] Changed where libmapper is polled to fix latency problem. --- learn_gest.py | 113 ++++++++++++++++++++++++-------------------------- 1 file changed, 54 insertions(+), 59 deletions(-) diff --git a/learn_gest.py b/learn_gest.py index ea75c31..6442a80 100644 --- a/learn_gest.py +++ b/learn_gest.py @@ -2,12 +2,13 @@ from pybrain.tools.shortcuts import buildNetwork from pybrain.datasets import SupervisedDataSet from pybrain.structure.modules import SigmoidLayer +from pybrain.tools.customxml import networkwriter import mapper import Tkinter +import re import tkFileDialog import sys -#./mapperRec.exe -m tkgui -b file -f tkgui.txt recurrent_flag=False; # default case is a nonrecurrent feedforward network @@ -47,7 +48,7 @@ num_hidden=5 print ("No Input Arguments (#inputs, #hidden nodes, #outputs), defaulting to: " + str(num_inputs) + ", " + str(num_hidden) + ", " + str(num_outputs) ) #instatiate mapper -l_map=mapper.device("learn_mapper",9000) +l_map=mapper.device("learn_mapper",9002) l_inputs={} l_outputs={} @@ -57,9 +58,8 @@ compute = 0 for s_index in range(num_inputs): - data_input[s_index+10]=0.0 -# data_input[s_index]=0.0 - + data_input[s_index]=0.0 +# data_input[s_index+10]=0.0 for s_index in range (num_outputs): data_output[s_index]=0.0 @@ -78,16 +78,15 @@ def main_loop(): print (tuple(data_input.values())) print ("Outputs: ") print (tuple( data_output.values())) -# print ("input/output values: " + (tuple(data_input.values()),tuple(data_output.values()))) - ds.addSample(tuple(data_input.values()),tuple(data_output.values())) -# if learning==1: -# pass - - -# if compute ==1: -# pass - l_map.poll(0) - + ds.addSample(tuple(data_input.values()),tuple(data_output.values())) + if (l_map.poll(1)) and ((compute==1) and (learning==0)): + #print "inputs to net: ", data_input + activated_out=net.activate(tuple(data_input.values())) + #print "Activated outs: ", activated_out + for out_index in range(num_outputs): + data_output[out_index]=activated_out[out_index] + sliders[out_index].set(activated_out[out_index]) + l_outputs[out_index].update(data_output[out_index]) def on_gui_change(x,s_index): @@ -98,9 +97,10 @@ def on_gui_change(x,s_index): global data_output if (compute==0): - data_output[s_index]=float(x)/100.0 - l_outputs[s_index].update(float(x)/100.0) - #print ("on gui 
change: ", data_output) + data_output[s_index]=float(x) + l_outputs[s_index].update(float(x)) + #print ("on gui change: ", data_output) + #l_map.poll(0) except: print ("WTF MATE? On Gui Change Error!") @@ -110,7 +110,7 @@ def on_gui_change(x,s_index): def tc(s_index): return lambda x: on_gui_change(x,s_index) - sliders[s_index]=Tkinter.Scale(master,from_=0,to=100, label='output'+str(s_index),orient=Tkinter.HORIZONTAL,length=300,command=tc(s_index)) + sliders[s_index]=Tkinter.Scale(master,from_=0,to=1, label='output'+str(s_index),orient=Tkinter.HORIZONTAL,length=300, resolution=0.01, command=tc(s_index)) sliders[s_index].pack() @@ -141,28 +141,28 @@ def compute_callback(): compute =0 print ("Compute network output is now OFF!") elif compute ==0: - - #trainer.trainUntilConvergence() b_compute.config(relief='sunken',text="Computing network outputs(ON)",bg='coral') compute =1 - print ("Comput network output is now ON!") - #print(dir(ds)) - #print(ds['target'][0]) - #print(ds['target'][1]) - #print(ds[1,0]) - #print(ds[1,1]) + print ("Compute network output is now ON!") def train_callback(): trainer = BackpropTrainer(net, learningrate=0.01, lrdecay=1, momentum=0.0, verbose=True) print 'MSE before', trainer.testOnData(ds, verbose=True) - trainer.trainUntilConvergence(ds, 2000) + epoch_count = 0 + while epoch_count < 1000: + epoch_count += 10 + trainer.trainUntilConvergence(dataset=ds, maxEpochs=10) + networkwriter.NetworkWriter.writeToFile(net,'autosave.network') print 'MSE after', trainer.testOnData(ds, verbose=True) - + print ("\n") + print 'Total epochs:', trainer.totalepochs def clear_dataset(): ds.clear() def clear_network(): + #resets the module buffers but doesn't reinitialise the connection weights + #TODO: reinitialise network here or make a new option for it. 
net.reset() def save_dataset(): @@ -172,11 +172,10 @@ def save_dataset(): csv_file.write("[inputs][outputs]\r\n") for inpt, tgt in ds: new_str=str("{" + repr(inpt) + "," + repr(tgt) + "}") -# (repr(inpt) + repr(tgt)) new_str=new_str.strip('\n') new_str=new_str.strip('\r') new_str=new_str+"\r" - print(repr(new_str)) + #print(repr(new_str)) csv_file.write(new_str) csv_file.close() def load_dataset(): @@ -186,7 +185,7 @@ def load_dataset(): print ds def save_net(): - from pybrain.tools.customxml import networkwriter + #from pybrain.tools.customxml import networkwriter save_filename = tkFileDialog.asksaveasfilename() networkwriter.NetworkWriter.writeToFile(net,save_filename) def load_net(): @@ -217,7 +216,6 @@ def load_net(): def ontimer(): - #print 'someshit' main_loop() # check the serial port master.after(10, ontimer) @@ -231,45 +229,43 @@ def h(sig, f): #print "mapper signal handler" #print (sig.name, f) - s_indx=str.split(sig.name,"/input/") -# print sig.name global data_input global data_output - data_input[int(s_indx[1])]=float(f/100.0) -# print(int(s_indx[1]),data_input[int(s_indx[1])]) + + #print sig.name + if '/in' in sig.name: + s_indx=str.split(sig.name,"/in") + + data_input[int(s_indx[1])]=float(f) + #print(int(s_indx[1]),data_input[int(s_indx[1])]) + + elif '/out' in sig.name: + if (learning==1): + #print "test" + s_indx=str.split(sig.name,"/out") + data_output[int(s_indx[1])]=float(f) + #print(int(s_indx[1]),data_output[int(s_indx[1])]) -# if (learning==1): -# print(int(s_indx[1]),data_input[int(s_indx[1])]) - if ((compute==1) and (learning==0)): - #print ("inputs to net: ",data_input) - activated_out=net.activate(tuple(data_input.values())) - #print ("Activated outs: ", activated_out) - for out_index in range(num_outputs): - data_output[out_index]=activated_out[out_index] - sliders[out_index].set(int(activated_out[out_index]*100.0)) - l_outputs[out_index].update(data_output[out_index]) + + except: print "WTF, h handler not working" #create mapper signals (inputs) for l_num in range(num_inputs): - l_inputs[l_num]=l_map.add_input("/input/"+str(l_num+int(10)),'f',h,None,0,100.0) -# l_inputs[l_num]=l_map.add_input("/input/"+str(l_num),'f',h,None,0,100.0) - l_map.poll(0) - print ("creating input", "/input/"+str(l_num+int(10))) -# print ("creating input", "/input/"+str(l_num)) + l_inputs[l_num]=l_map.add_input("/in%d"%l_num, 1, 'f',None,0,1.0, h) + #l_map.poll(0) + print ("creating input", "/in"+str(l_num)) #create mapper signals (outputs) for l_num in range(num_outputs): -# l_outputs[l_num]=l_map.add_output("/output/"+str(l_num+int(10)),'f',None,0,1) - l_outputs[l_num]=l_map.add_output("/output/"+str(l_num),'f',None,0,1) - l_map.poll(0) -# print ("creating output","/output/"+str(l_num+int(10))) - print ("creating output","/output/"+str(l_num)) + l_outputs[l_num]=l_map.add_output("/out"+str(l_num), 1, 'f',None,0.0,1.0) + l_inputs[l_num + num_inputs]=l_map.add_input("/out%d"%l_num, 1, 'f',None,0,1.0, h) + #l_map.poll(0) + print ("creating output","/out"+str(l_num)) #create network net = buildNetwork(num_inputs,num_hidden,num_outputs,bias=True, hiddenclass=SigmoidLayer, outclass=SigmoidLayer, recurrent=recurrent_flag) -#net = buildNetwork(num_inputs,num_hidden,num_outputs,bias=True) #create dataSet ds = SupervisedDataSet(num_inputs, num_outputs) @@ -278,7 +274,6 @@ def h(sig, f): ontimer() - #master.after(500, ontimer) master.protocol("WM_DELETE_WINDOW", master.quit) master.mainloop() master.destroy() From b901455ced13dd025cf5bdf5cc1ec93778244b62 Mon Sep 17 00:00:00 2001 From: 
Yaremchuk Date: Fri, 7 Feb 2014 14:43:59 -0500 Subject: [PATCH 5/5] Update learn_gest.py Now works with most recent libmapper API --- learn_gest.py | 37 +++++++++++++++++++------------------ 1 file changed, 19 insertions(+), 18 deletions(-) diff --git a/learn_gest.py b/learn_gest.py index 6442a80..9e43e93 100644 --- a/learn_gest.py +++ b/learn_gest.py @@ -1,7 +1,7 @@ from pybrain.supervised.trainers import BackpropTrainer from pybrain.tools.shortcuts import buildNetwork from pybrain.datasets import SupervisedDataSet -from pybrain.structure.modules import SigmoidLayer +from pybrain.structure.modules import TanhLayer from pybrain.tools.customxml import networkwriter import mapper import Tkinter @@ -78,15 +78,9 @@ def main_loop(): print (tuple(data_input.values())) print ("Outputs: ") print (tuple( data_output.values())) - ds.addSample(tuple(data_input.values()),tuple(data_output.values())) - if (l_map.poll(1)) and ((compute==1) and (learning==0)): - #print "inputs to net: ", data_input - activated_out=net.activate(tuple(data_input.values())) - #print "Activated outs: ", activated_out - for out_index in range(num_outputs): - data_output[out_index]=activated_out[out_index] - sliders[out_index].set(activated_out[out_index]) - l_outputs[out_index].update(data_output[out_index]) + ds.addSample(tuple(data_input.values()),tuple(data_output.values())) + l_map.poll(100) + def on_gui_change(x,s_index): @@ -100,7 +94,7 @@ def on_gui_change(x,s_index): data_output[s_index]=float(x) l_outputs[s_index].update(float(x)) #print ("on gui change: ", data_output) - #l_map.poll(0) + l_map.poll(0) except: print ("WTF MATE? On Gui Change Error!") @@ -146,7 +140,7 @@ def compute_callback(): print ("Compute network output is now ON!") def train_callback(): - trainer = BackpropTrainer(net, learningrate=0.01, lrdecay=1, momentum=0.0, verbose=True) + trainer = BackpropTrainer(net, learningrate=0.001, lrdecay=1, momentum=0.0, verbose=True) print 'MSE before', trainer.testOnData(ds, verbose=True) epoch_count = 0 while epoch_count < 1000: @@ -224,7 +218,7 @@ def ontimer(): #mapper signal handler (updates data_input[sig_indx]=new_float_value) -def h(sig, f): +def h(sig, id, f, timetag): try: #print "mapper signal handler" #print (sig.name, f) @@ -246,7 +240,14 @@ def h(sig, f): data_output[int(s_indx[1])]=float(f) #print(int(s_indx[1]),data_output[int(s_indx[1])]) - + if ((compute==1) and (learning==0)): + #print "inputs to net: ", data_input + activated_out=net.activate(tuple(data_input.values())) + #print "Activated outs: ", activated_out + for out_index in range(num_outputs): + data_output[out_index]=activated_out[out_index] + sliders[out_index].set(activated_out[out_index]) + l_outputs[out_index].update(data_output[out_index]) except: print "WTF, h handler not working" @@ -254,18 +255,18 @@ def h(sig, f): #create mapper signals (inputs) for l_num in range(num_inputs): l_inputs[l_num]=l_map.add_input("/in%d"%l_num, 1, 'f',None,0,1.0, h) - #l_map.poll(0) + l_map.poll(0) print ("creating input", "/in"+str(l_num)) #create mapper signals (outputs) for l_num in range(num_outputs): l_outputs[l_num]=l_map.add_output("/out"+str(l_num), 1, 'f',None,0.0,1.0) - l_inputs[l_num + num_inputs]=l_map.add_input("/out%d"%l_num, 1, 'f',None,0,1.0, h) - #l_map.poll(0) + l_inputs[l_num + num_inputs]=l_map.add_input("/out%d"%l_num, 1, 'f',None,0.0,1.0) + l_map.poll(0) print ("creating output","/out"+str(l_num)) #create network -net = buildNetwork(num_inputs,num_hidden,num_outputs,bias=True, hiddenclass=SigmoidLayer, outclass=SigmoidLayer, 
recurrent=recurrent_flag) +net = buildNetwork(num_inputs,num_hidden,num_outputs,bias=True, hiddenclass=TanhLayer, outclass=TanhLayer, recurrent=recurrent_flag) #create dataSet ds = SupervisedDataSet(num_inputs, num_outputs)
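

Taken together, the five patches evolve learn_gest.py from a fixed feedforward PyBrain network into a configurable one: patch 1 adds an R/F command-line flag that sets recurrent_flag for buildNetwork, patch 4 moves network activation out of the per-signal callback and rescales the signals to the 0-1 range, and patch 5 swaps SigmoidLayer for TanhLayer, lowers the learning rate, and adopts the four-argument (sig, id, f, timetag) handler of the newer libmapper API.

The sketch below is a minimal, self-contained distillation of the training core as it stands after patch 5. The toy dataset and the loop bound are illustrative assumptions standing in for the recorded gesture samples; the libmapper and Tkinter plumbing is omitted.

    # Minimal sketch: the network/training core after patch 5.
    # Toy data is a placeholder for recorded gesture samples.
    import random

    from pybrain.supervised.trainers import BackpropTrainer
    from pybrain.tools.shortcuts import buildNetwork
    from pybrain.datasets import SupervisedDataSet
    from pybrain.structure.modules import TanhLayer
    from pybrain.tools.customxml import networkwriter

    num_inputs, num_hidden, num_outputs = 8, 5, 8
    recurrent_flag = True  # what the "R" command-line argument selects

    # Patch 5's network: tanh hidden and output units; recurrent=True
    # adds the hidden-layer feedback loop that patch 1's option toggles.
    net = buildNetwork(num_inputs, num_hidden, num_outputs, bias=True,
                       hiddenclass=TanhLayer, outclass=TanhLayer,
                       recurrent=recurrent_flag)

    # Placeholder dataset: random input vectors mapped to their
    # complements, standing in for captured input/output pairs.
    ds = SupervisedDataSet(num_inputs, num_outputs)
    for _ in range(20):
        inp = tuple(random.random() for _ in range(num_inputs))
        ds.addSample(inp, tuple(1.0 - v for v in inp))

    # Patch 5's hyperparameters: small learning rate, no momentum.
    trainer = BackpropTrainer(net, learningrate=0.001, lrdecay=1,
                              momentum=0.0, verbose=True)
    print 'MSE before', trainer.testOnData(ds, verbose=True)

    # Train in ten-epoch slices, autosaving between slices as
    # train_callback does (the script loops to 1000 epochs; 100 here).
    epoch_count = 0
    while epoch_count < 100:
        epoch_count += 10
        trainer.trainUntilConvergence(dataset=ds, maxEpochs=10)
        networkwriter.NetworkWriter.writeToFile(net, 'autosave.network')

    print 'MSE after', trainer.testOnData(ds, verbose=True)
    print 'Total epochs:', trainer.totalepochs

    # reset() clears the recurrent module buffers (what the "Reset
    # Network" button calls); it does not reinitialise the weights,
    # hence the TODO left in clear_network().
    net.reset()
    print 'activation:', net.activate((0.5,) * num_inputs)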
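
For reference, a stripped-down sketch of the libmapper side after patch 5, mirroring the calls exactly as they appear in the diff (device name and port, the /in and /out signal names, the four-argument handler, and the blocking poll). The standalone polling loop at the end is an assumption for illustration only; the script itself polls from its Tkinter timer, and exact signatures depend on the libmapper version in use.

    import mapper

    num_inputs, num_outputs = 8, 8

    # Handler signature after patch 5: (signal, instance id, value,
    # timetag); 'id' shadows the builtin but matches the patch's naming.
    def h(sig, id, f, timetag):
        if '/in' in sig.name:
            idx = int(sig.name.split('/in')[1])
            print 'input', idx, '=', float(f)

    dev = mapper.device("learn_mapper", 9002)  # port moved from 9000 in patch 4
    l_inputs, l_outputs = {}, {}
    for n in range(num_inputs):
        l_inputs[n] = dev.add_input("/in%d" % n, 1, 'f', None, 0, 1.0, h)
    for n in range(num_outputs):
        l_outputs[n] = dev.add_output("/out%d" % n, 1, 'f', None, 0.0, 1.0)

    while True:
        dev.poll(100)  # the blocking poll patch 5 moves into main_loop()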