author    | Christian Andreetta <satya@gentoo.org> | 2005-01-04 11:37:55 +0000
committer | Christian Andreetta <satya@gentoo.org> | 2005-01-04 11:37:55 +0000
commit    | 87c0138a4087c562babad6357349c70d1799ac0e (patch)
tree      | a3a0b3e76cecdaaba841bd14b39dac9e146bfa56 /sci-mathematics/fann/files
parent    | Removed app-text/vilistextum. (diff)
download  | historical-87c0138a4087c562babad6357349c70d1799ac0e.tar.gz
          | historical-87c0138a4087c562babad6357349c70d1799ac0e.tar.bz2
          | historical-87c0138a4087c562babad6357349c70d1799ac0e.zip
Python bindings patch by Martin Renold, posted to the official mailing list: http://old.homeip.net/martin/fann-python-bindings.patch
Diffstat (limited to 'sci-mathematics/fann/files')
-rw-r--r-- | sci-mathematics/fann/files/digest-fann-1.2.0-r1 |   1
-rw-r--r-- | sci-mathematics/fann/files/fann-1.2.0-r1.patch  | 417
2 files changed, 418 insertions, 0 deletions
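
For context, a minimal sketch of how the patched bindings are meant to be used, adapted from the patched python/examples/simple_train.py in the diff below; the layer sizes and training parameters here are illustrative values, not taken from the patch:

    import fann

    # illustrative network shape and training parameters (not from the patch)
    connection_rate = 1.0
    learning_rate = 0.7
    num_input, num_neurons_hidden, num_output = 2, 4, 1

    ann = fann.create(connection_rate, learning_rate,
                      (num_input, num_neurons_hidden, num_output))

    # activation constants are now exposed without the FANN_ prefix
    ann.set_activation_function_output(fann.SIGMOID_SYMMETRIC_STEPWISE)

    ann.train_on_file("datasets/xor.data", 100000, 1000, 0.0001)
    ann.save("xor_float.net")
    # no explicit ann.destroy() call any more: the wrapped struct is freed
    # in fann_class.__del__ when the object is garbage collected
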
diff --git a/sci-mathematics/fann/files/digest-fann-1.2.0-r1 b/sci-mathematics/fann/files/digest-fann-1.2.0-r1
new file mode 100644
index 000000000000..24559c72c665
--- /dev/null
+++ b/sci-mathematics/fann/files/digest-fann-1.2.0-r1
@@ -0,0 +1 @@
+MD5 d655f82d4a47e4b697b0083fdaa78c71 fann-1.2.0.tar.bz2 2082660
diff --git a/sci-mathematics/fann/files/fann-1.2.0-r1.patch b/sci-mathematics/fann/files/fann-1.2.0-r1.patch
new file mode 100644
index 000000000000..12aae86dad7e
--- /dev/null
+++ b/sci-mathematics/fann/files/fann-1.2.0-r1.patch
@@ -0,0 +1,417 @@
+diff -N -u -r fann-1.2.0.orig/python/examples/mushroom.py fann-1.2.0.my/python/examples/mushroom.py
+--- fann-1.2.0.orig/python/examples/mushroom.py 2004-07-24 01:36:04.000000000 +0200
++++ fann-1.2.0.my/python/examples/mushroom.py 2005-01-03 20:48:21.000000000 +0100
+@@ -20,9 +20,9 @@
+
+ # start training the network
+ print "Training network"
+-ann.set_activation_function_hidden(fann.FANN_SIGMOID_SYMMETRIC_STEPWISE)
+-ann.set_activation_function_output(fann.FANN_SIGMOID_STEPWISE)
+-ann.set_training_algorithm(fann.FANN_TRAIN_INCREMENTAL)
++ann.set_activation_function_hidden(fann.SIGMOID_SYMMETRIC_STEPWISE)
++ann.set_activation_function_output(fann.SIGMOID_STEPWISE)
++ann.set_training_algorithm(fann.TRAIN_INCREMENTAL)
+
+ ann.train_on_data(train_data, max_iterations, iterations_between_reports, desired_error)
+
+@@ -40,9 +40,3 @@
+ print "Saving network"
+ ann.save("mushroom_float.net")
+
+-# blow it all up
+-print "Cleaning up."
+-ann.destroy()
+-test_data.destroy()
+-train_data.destroy()
+-
+diff -N -u -r fann-1.2.0.orig/python/examples/simple_train.py fann-1.2.0.my/python/examples/simple_train.py
+--- fann-1.2.0.orig/python/examples/simple_train.py 2004-07-24 01:35:58.000000000 +0200
++++ fann-1.2.0.my/python/examples/simple_train.py 2005-01-03 20:48:01.000000000 +0100
+@@ -12,9 +12,9 @@
+ iterations_between_reports = 1000
+
+ ann = fann.create(connection_rate, learning_rate, (num_input, num_neurons_hidden, num_output))
++ann.set_activation_function_output(fann.SIGMOID_SYMMETRIC_STEPWISE)
+
+ ann.train_on_file("datasets/xor.data", max_iterations, iterations_between_reports, desired_error)
+
+ ann.save("xor_float.net")
+
+-ann.destroy()
+diff -N -u -r fann-1.2.0.orig/python/fann_helper.c fann-1.2.0.my/python/fann_helper.c
+--- fann-1.2.0.orig/python/fann_helper.c 2004-07-26 09:52:30.000000000 +0200
++++ fann-1.2.0.my/python/fann_helper.c 2005-01-03 22:13:09.000000000 +0100
+@@ -68,3 +68,8 @@
+ return get_row_from_double_array(t->output, row, t->num_output);
+ }
+
++
++int fann_is_NULL(struct fann *ann)
++{
++ return ann == NULL ? 1 : 0;
++}
+diff -N -u -r fann-1.2.0.orig/python/fann.py fann-1.2.0.my/python/fann.py
+--- fann-1.2.0.orig/python/fann.py 2004-07-26 09:46:04.000000000 +0200
++++ fann-1.2.0.my/python/fann.py 2005-01-03 22:32:49.000000000 +0100
+@@ -21,30 +21,23 @@
+
+ import libfann
+
+-# Activation function
+-FANN_LINEAR = 0
+-FANN_THRESHOLD = 1
+-FANN_THRESHOLD_SYMMETRIC = 2
+-FANN_SIGMOID = 3
+-FANN_SIGMOID_STEPWISE = 4 # default
+-FANN_SIGMOID_SYMMETRIC = 5
+-FANN_SIGMOID_SYMMETRIC_STEPWISE = 6
+-FANN_GAUSSIAN = 7
+-FANN_GAUSSIAN_STEPWISE = 8
+-FANN_ELLIOT = 9 # not implemented yet
+-FANN_ELLIOT_SYMMETRIC = 10 # not implemented yet
+-
+-# Training algorithm
+-FANN_TRAIN_INCREMENTAL = 0
+-FANN_TRAIN_BATCH = 1
+-FANN_TRAIN_RPROP = 2
+-FANN_TRAIN_QUICKPROP = 3
++# import all FANN_ constants without FANN_ prefix
++for name, value in libfann.__dict__.iteritems():
++ if name.startswith('FANN_') and not name.endswith('_NAMES'):
++ globals()[name[5:]] = value
++del name, value
+
+ class fann_class:
+
+ def __init__(self, ann):
++ """
++ Never call this directly.
++ """
+ self.__ann = ann
+-
++
++ def __del__(self):
++ libfann.fann_destroy(self.__ann)
++
+ def get_native_object(self):
+ return self.__train_data
+
+@@ -54,13 +47,6 @@
+ """
+ return libfann.fann_run(self.__ann, input)
+
+- def destroy(self):
+- """
+- Destructs the entire network.
+- Be sure to call this function after finished using the network.
+- """
+- libfann.fann_destroy(self.__ann)
+-
+ def randomize_weights(self, min_weight, max_weight):
+ """
+ Randomize weights (from the beginning the weights are random between -0.1 and 0.1)
+@@ -198,31 +184,31 @@
+ """
+ libfann.fann_set_activation_function_output(self.__ann, activation_function)
+
+- def get_activation_hidden_steepness(self):
++ def get_activation_steepness_hidden(self):
+ """
+ Get the steepness parameter for the sigmoid function used in the hidden layers.
+ """
+- return libfann.get_activation_hidden_steepness(self.__ann)
++ return libfann.get_activation_steepness_hidden(self.__ann)
+
+- def set_activation_hidden_steepness(self, steepness):
++ def set_activation_steepness_hidden(self, steepness):
+ """
+ Set the steepness of the sigmoid function used in the hidden layers.
+ Only usefull if sigmoid function is used in the hidden layers (default 0.5).
+ """
+- libfann.fann_set_activation_hidden_steepness(self.__ann, steepness)
++ libfann.fann_set_activation_steepness_hidden(self.__ann, steepness)
+
+- def get_activation_output_steepness(self):
++ def get_activation_steepness_output(self):
+ """
+ Get the steepness parameter for the sigmoid function used in the output layer.
+ """
+- return libfann.fann_get_activation_output_steepness(self.__ann)
++ return libfann.fann_get_activation_steepness_output(self.__ann)
+
+- def set_activation_output_steepness(self, steepness):
++ def set_activation_steepness_output(self, steepness):
+ """
+ Set the steepness of the sigmoid function used in the output layer.
+ Only usefull if sigmoid function is used in the output layer (default 0.5).
+ """
+- libfann.fann_set_activation_output_steepness(self.__ann, steepness)
++ libfann.fann_set_activation_steepness_output(self.__ann, steepness)
+
+ def train_on_data(self, data, max_epochs, epochs_between_reports, desired_error):
+ """
+@@ -269,7 +255,12 @@
+ class train_class:
+
+ def __init__(self, train_data):
++ """
++ Never call this directly.
++ """
+ self.__train_data = train_data
++ def __del__(self):
++ libfann.fann_destroy_train(self.__train_data)
+
+ def get_native_object(self):
+ return self.__train_data
+@@ -289,13 +280,6 @@
+ def get_output(self, index):
+ return libfann.get_train_data_output(self.__train_data, index);
+
+- def destroy(self):
+- """
+- Destructs the training data
+- Be sure to call this function after finished using the training data.
+- """
+- libfann.fann_destroy_train(self.__train_data)
+-
+ def shuffle(self):
+ """
+ Shuffles training data, randomizing the order
+@@ -317,12 +301,10 @@
+
+ def merge(self, other):
+ """
+- Merges training data into a single struct
++ Merges training data into a new struct
+ """
+ outcome = libfann.fann_merge_train_data(self.__train_data, other.get_native_object())
+- self.destroy()
+- self.__train_data = outcome
+- return self
++ return train_class(outcome)
+
+ def duplicate(self):
+ """
+@@ -345,6 +327,8 @@
+ When running the network, the bias nodes always emits 1
+ """
+ ann = libfann.fann_create_array(connection_rate, learning_rate, len(layers), layers)
++ if libfann.fann_is_NULL(ann):
++ return None # probably won't happen
+ return fann_class(ann)
+
+ def create_from_file(filename):
+@@ -352,6 +336,8 @@
+ Constructs a backpropagation neural network from a configuration file.
+ """
+ ann = libfann.fann_create_from_file(filename)
++ if libfann.fann_is_NULL(ann):
++ raise IOError, "Could not load ann from file '%s'" + filename
+ return fann_class(ann)
+
+ def read_train_from_file(filename):
+diff -N -u -r fann-1.2.0.orig/python/libfann.i fann-1.2.0.my/python/libfann.i
+--- fann-1.2.0.orig/python/libfann.i 2004-07-20 00:21:20.000000000 +0200
++++ fann-1.2.0.my/python/libfann.i 2005-01-03 22:58:56.000000000 +0100
+@@ -7,15 +7,16 @@
+ #include "../src/include/fann.h"
+ %}
+
+-%typemap(in) fann_type[ANY] {
++%define CHECKED_FLOAT_ARRAY(typemap_name, expected_length)
++%typemap(in) typemap_name {
+ int i;
+ if (!PySequence_Check($input)) {
+ PyErr_SetString(PyExc_ValueError,"Expected a sequence");
+- return NULL;
++ SWIG_fail;
+ }
+- if (PySequence_Length($input) == 0) {
+- PyErr_SetString(PyExc_ValueError,"Size mismatch. Expected some elements");
+- return NULL;
++ if (PySequence_Length($input) != expected_length) {
++ PyErr_SetString(PyExc_ValueError,"Sequence has wrong length");
++ SWIG_fail;
+ }
+ $1 = (float *) malloc(PySequence_Length($input)*sizeof(float));
+ for (i = 0; i < PySequence_Length($input); i++) {
+@@ -24,20 +25,29 @@
+ $1[i] = (float) PyFloat_AsDouble(o);
+ } else {
+ PyErr_SetString(PyExc_ValueError,"Sequence elements must be numbers");
+- return NULL;
++ Py_DECREF(o);
++ SWIG_fail;
+ }
++ Py_DECREF(o);
+ }
+ }
++%typemap(freearg) typemap_name {
++ if ($1) free($1);
++}
++%enddef
++
++CHECKED_FLOAT_ARRAY(fann_type *input, arg1->num_input)
++CHECKED_FLOAT_ARRAY(fann_type *desired_output, arg1->num_output)
+
+ %typemap(in) int[ANY] {
+ int i;
+ if (!PySequence_Check($input)) {
+ PyErr_SetString(PyExc_ValueError,"Expected a sequence");
+- return NULL;
++ SWIG_fail;
+ }
+ if (PySequence_Length($input) == 0) {
+ PyErr_SetString(PyExc_ValueError,"Size mismatch. Expected some elements");
+- return NULL;
++ SWIG_fail;
+ }
+ $1 = (unsigned int *) malloc(PySequence_Length($input)*sizeof(unsigned int));
+ for (i = 0; i < PySequence_Length($input); i++) {
+@@ -46,37 +56,41 @@
+ $1[i] = (int) PyInt_AsLong(o);
+ } else {
+ PyErr_SetString(PyExc_ValueError,"Sequence elements must be numbers");
+- return NULL;
++ Py_DECREF(o);
++ SWIG_fail;
+ }
++ Py_DECREF(o);
+ }
+ }
+-
+-%typemap(freearg) fann_type* {
++%typemap(freearg) int[ANY] {
+ if ($1) free($1);
+ }
++%apply int[ANY] {int *, unsigned int*};
++
++typedef double fann_type;
+
+ %typemap(out) PyObject* {
+ $result = $1;
+ }
+
+-%apply fann_type[ANY] {fann_type *};
+-%apply int[ANY] {int *, unsigned int*};
++// create_array is used instead
++%ignore fann_create;
++%ignore fann_create_shortcut;
+
+-#define FANN_INCLUDE
+-%varargs(10,int n = 0) fann_create;
+ %rename(fann_run_old) fann_run;
+ %rename(fann_run) fann_run2;
+
+ %rename(fann_test_old) fann_test;
+ %rename(fann_test) fann_test2;
+
++#define FANN_INCLUDE
+ %include "../src/include/fann.h"
+ %include "../src/include/fann_data.h"
++%include "../src/include/fann_activation.h"
+
+ // Helper functions
+ PyObject* fann_run2(struct fann *ann, fann_type *input);
+ PyObject* fann_test2(struct fann *ann, fann_type *input, fann_type *desired_output);
+ PyObject* get_train_data_input(struct fann_train_data *ann, int row);
+ PyObject* get_train_data_output(struct fann_train_data *ann, int row);
+-
+-
++int fann_is_NULL(struct fann *ann);
+diff -N -u -r fann-1.2.0.orig/python/makefile.gnu fann-1.2.0.my/python/makefile.gnu
+--- fann-1.2.0.orig/python/makefile.gnu 2004-10-09 13:56:57.000000000 +0200
++++ fann-1.2.0.my/python/makefile.gnu 2005-01-02 16:52:23.000000000 +0100
+@@ -1,5 +1,7 @@
+ # This makefile was written to compile a distribution of pyfann for
+ # GNU platforms (cygwin included.)
++#
++# This is NOT needed for Linux/Unix, use setup_unix.py instead.
+
+ TARGETS = _libfann.dll
+
+diff -N -u -r fann-1.2.0.orig/python/README fann-1.2.0.my/python/README
+--- fann-1.2.0.orig/python/README 2004-08-06 10:54:13.000000000 +0200
++++ fann-1.2.0.my/python/README 2005-01-02 17:00:28.000000000 +0100
+@@ -1,6 +1,9 @@
+ This python binding is provided by Vincenzo Di Massa <hawk.it@tiscalinet.it>
+ and Gil Megidish <gil@megidish.net>
+
++Instructions for Windows:
++^^^^^^^^^^^^^^^^^^^^^^^^^
++
+ MAKE
+ Make sure to make the fann library first. You are required to have
+ swig and python development files installed. After you compiled the
+@@ -24,3 +27,17 @@
+
+ USAGE
+ Just import fann.
++
++Instructions for Unix/Linux:
++^^^^^^^^^^^^^^^^^^^^^^^^^^^^
++
++First build and install the fann library. Then run:
++
++./setup_unix.py build
++./setup_unix.py install
++
++Install alone will work too, if you run it twice (a small bug).
++The examples/ (not installed) should work now after you copy the datasets:
++
++mkdir examples/datasets
++cp ../examples/xor.data ../benchmarks/datasets/mushroom.* examples/datasets/
+diff -N -u -r fann-1.2.0.orig/python/setup.py fann-1.2.0.my/python/setup.py
+--- fann-1.2.0.orig/python/setup.py 2004-07-26 09:56:59.000000000 +0200
++++ fann-1.2.0.my/python/setup.py 2005-01-02 16:49:45.000000000 +0100
+@@ -22,7 +22,7 @@
+ """
+ override default distutils install_data, so we can copy
+ files directly, without splitting into modules, scripts,
+- packages, and extensions."
++ packages, and extensions.
+ """
+ def run(self):
+ # need to change self.install_dir to the actual library dir
+diff -N -u -r fann-1.2.0.orig/python/setup_unix.py fann-1.2.0.my/python/setup_unix.py
+--- fann-1.2.0.orig/python/setup_unix.py 1970-01-01 01:00:00.000000000 +0100
++++ fann-1.2.0.my/python/setup_unix.py 2005-01-02 16:27:17.000000000 +0100
+@@ -0,0 +1,38 @@
++#!/usr/bin/env python
++from distutils.core import setup, Extension
++#from glob import glob
++
++VERSION='1.2.0'
++
++LONG_DESCRIPTION="""\
++Fast Artificial Neural Network Library implements multilayer
++artificial neural networks with support for both fully connected
++and sparsely connected networks. It includes a framework for easy
++handling of training data sets. It is easy to use, versatile, well
++documented, and fast.
++"""
++
++module1 = Extension(
++ '_libfann',
++ sources = ['libfann.i', 'fann_helper.c'],
++ libraries = ['fann'],
++ #extra_objects = glob('../src/fann*.o'),
++ )
++
++setup(
++ name='pyfann',
++ version=VERSION,
++ description='Fast Artificial Neural Network Library (fann)',
++ long_description=LONG_DESCRIPTION,
++ author='Steffen Nissen',
++ author_email='lukesky@diku.dk',
++ maintainer='Gil Megidish',
++ maintainer_email='gil@megidish.net',
++ url='http://sourceforge.net/projects/fann/',
++ license='GNU LESSER GENERAL PUBLIC LICENSE (LGPL)',
++ platforms='UNIX',
++
++ ext_modules = [module1],
++ py_modules = ['libfann', 'fann']
++ )
++