Hi Jleehurt,

I'm still a newbie and don't know for sure, but you have at least two
recursive functions - matrixAdd and matrixMultiplyScalar.

I've modified them to work with loop/recur, but I can't tell whether
they have the same efficiency (at least there is no stack problem).

Still, if I remove the "dorun" from the place where I originally added
it, the stack overflow happens again.

In any case, here is my latest modification. (Sorry, I've also modified
other pieces while trying to speed it up :) - failing miserably - you
can ignore them!)


-------


;; Neuron Activation Functions

;threshold
(defn threshold
  "Step activation: 1.0 when x is at least zero, otherwise 0.0."
  [#^Double x]
  (cond
    (>= x 0.0) 1.0
    :else 0.0))

;signum activation: sign of x as a double (-1.0, 0.0, or 1.0).
;NOTE(review): Math/signum only has float/double overloads, so x is
;expected to be a floating-point number -- confirm callers never pass ints.
(defn signum [x]
  (Math/signum x))

;; Matrix Functions
(defn transpose
  "Turn a matrix (seq of rows) into a seq of columns; nil in, nil out."
  [matrix]
  (when-not (nil? matrix)
    (apply map list matrix)))

(defn matrixMultiply
  "Matrix product of matrixA and matrixB (both seqs of rows).
  The result is fully realized: the previous version returned a lazy seq
  and relied on dorun calls as a band-aid, but deferred computations
  could still pile up across iterations until forcing them overflowed
  the stack. Eager realization removes the root cause."
  [matrixA matrixB]
  (let [columnsB (apply map list matrixB)]
    (vec (map (fn [row]
                ;; dot product of this row with every column of B
                (vec (map (fn [column] (reduce + (map * row column)))
                          columnsB)))
              matrixA))))

;; Original recursive version, kept for reference.
;; NOTE(review): not tail-recursive -- the conj happens AFTER the
;; recursive call returns, so each row costs a stack frame and large
;; matrices throw StackOverflowError (the subject of this thread).
;; The (map + ...) rows are also lazy, deferring the arithmetic.
(defn matrixAdd-old [matrixA matrixB]
  (if (and (not (empty? matrixA))
           (not (empty? matrixB)))
      (conj
        (matrixAdd-old (rest matrixA) (rest matrixB))
        (map + (first matrixA) (first matrixB)))))

(defn matrixAdd
  "Element-wise sum of matrixA and matrixB, truncated to the shorter
  of the two. The result is fully realized: the loop/recur version
  still built each row as a lazy (map + ...) seq, and repeated weight
  updates nested those thunks deep enough that forcing them at the end
  overflowed the stack (hence the dorun workaround elsewhere)."
  [matrixA matrixB]
  (vec (map (fn [rowA rowB] (vec (map + rowA rowB)))
            matrixA
            matrixB)))

;; Original recursive version, kept for reference.
;; NOTE(review): not tail-recursive -- one stack frame per row, so a
;; large matrix overflows the stack; the (map ...) rows are also lazy,
;; deferring the multiplication until the result is finally forced.
(defn matrixMultiplyScalar-old [matrixA scalar]
  (if (not (empty? matrixA))
      (conj
       (matrixMultiplyScalar-old (rest matrixA) scalar)
       (map (fn [arg] (* arg scalar))
            (first matrixA)))))

(defn matrixMultiplyScalar
  "Multiply every element of matrixA by scalar. The result is fully
  realized: the loop/recur version still produced lazy (map ...) rows,
  which let deferred computations accumulate across repeated weight
  updates until forcing them overflowed the stack."
  [matrixA scalar]
  (vec (map (fn [row] (vec (map (fn [x] (* x scalar)) row)))
            matrixA)))

;; Binary Logic Input/Output
;; Infinite lazy cycle over the four bipolar (+/-1.0) two-input
;; patterns, each shaped as a 1x2 row matrix.
(def infiniteInputCollection
     (cycle [[[-1.0 -1.0]] [[-1.0 1.0]] [[1.0 -1.0]] [[1.0 1.0]]]))

;; Matching AND truth-table outputs for the input cycle above,
;; bipolar-encoded: -1.0 = false, 1.0 = true.
(def infiniteAndOutputCollection
     (cycle [-1.0 -1.0 -1.0 1.0]))

(defn buildInputs
  "Return a vector of the first numberOfInputs patterns from
  infiniteInputCollection.
  Fixes an off-by-one in the original loop: the (> 0 remainingCount)
  test only stopped once the count went negative, so it collected
  numberOfInputs + 1 entries instead of numberOfInputs."
  [numberOfInputs]
  (vec (take numberOfInputs infiniteInputCollection)))

(defn buildOutputs
  "Return a vector of the first numberOfOutputs values of
  outputCollection (typically an infinite cycle of desired responses).
  Fixes an off-by-one in the original loop: the (> 0 remainingCount)
  test only stopped once the count went negative, so it collected
  numberOfOutputs + 1 entries instead of numberOfOutputs."
  [numberOfOutputs outputCollection]
  (vec (take numberOfOutputs outputCollection)))

;; Main

;; Learning-rate parameter (eta) scaling each perceptron weight update.
(def learningRateParameter 0.5)

;; Compute the perceptron's output: the scalar top-left entry of
;; (transpose weights) x inputs, passed through the supplied
;; activation/signum function.
(defn computeActualResponse [signumFunction weights inputs]
  (let [product (matrixMultiply (transpose weights) inputs)]
    (signumFunction (ffirst product))))

;; Perceptron learning rule: w' = w + eta * (desired - actual) * x.
(defn getAdaptedWeightVector
  [weights inputs desiredResponse actualResponse]
  (let [correction (* learningRateParameter
                      (- desiredResponse actualResponse))]
    (matrixAdd weights (matrixMultiplyScalar inputs correction))))

;; One pass over the training data: fold each (input, desired-response)
;; pair into the weight matrix via the perceptron update rule, returning
;; the final weights once either sequence runs out.
(defn trainPerceptron [beginningWeightVector allInputs allOutputs]
  (loop [weightVector beginningWeightVector
         inputs allInputs
         responses allOutputs]
    (if (and (seq inputs) (seq responses))
      (recur (getAdaptedWeightVector
              weightVector
              (first inputs)
              (first responses)
              (computeActualResponse signum weightVector (first inputs)))
             (rest inputs)
             (rest responses))
      weightVector)))

(defn main
  "Train a two-input perceptron on sizeOfDataSet cycled AND examples,
  starting from zero weights; returns the learned weight matrix."
  [sizeOfDataSet]
  (trainPerceptron [[0.0 0.0]]
                   (buildInputs sizeOfDataSet)
                   (buildOutputs sizeOfDataSet infiniteAndOutputCollection)))


-------






On Apr 21, 12:58 am, jleehurt <jleeh...@gmail.com> wrote:
> Hi Dimiter,
>
> Thank you! I'm still a bit confused as to why this was happening. Does
> lazy evaluation not work well with recursion?
>
> On Apr 20, 11:06 pm, "Dimiter \"malkia\" Stanev" <mal...@gmail.com>
> wrote:
>
>
>
> > I blindly tried printing out stuff from matrixMultiply, and found out
> > that if I print matrixA and matrixB it doesn't run out of stack, so I
> > guess I was "forcing" them to "work", here is a version with (dorun)
> > that has the same side effect, without printing:
>
> > (defn matrixMultiply [matrixA matrixB]
> >   (dorun matrixA)
> >   (dorun matrixB)
> >   (map
> >    (fn [row]
> >      (apply map
> >             (fn [& column]
> >               (apply + (map * row column)))
> >             matrixB))
> >    matrixA))
>
> > user> (main 100000)
> > ((0.5 50000.5))
> > user> (main 1000000)
> > ((0.5 500000.5))
> > user> (time (main 1000000))
> > "Elapsed time: 8314.617 msecs"
> > ((0.5 500000.5))
> > user> (time (main 10000000))
> > ; Evaluation aborted. ;; Actually not stack overflow, but HEAP
> > overflow (it took a while though)
> > user>
>
> > Thanks,
> > Dimiter "malkia" Stanev.
>
> > On Apr 20, 10:01 pm, jleehurt <jleeh...@gmail.com> wrote:
>
> > > Hi David,
>
> > > Those two are not recursive, but they call into other functions that
> > > are. Do I need to make sure that all recursive functions use the loop/
> > > recur pattern? Or should I not nest recursive calls like this?
>
> > > Here is the whole source:
>
> > > ;; Neuron Activation Functions
>
> > > ;threshold
> > > (defn threshold [x] (if (>= x 0) 1 0))
>
> > > ;signum (threshold)
> > > (defn signum [x] (cond (> x 0) 1 (= x 0) 0 (< x 0) -1))
>
> > > ;; Matrix Functions
>
> > > (defn transpose [matrix]
> > >   (if (not (nil? matrix))
> > >       (apply map list matrix)))
>
> > > (defn transpose2 [matrix]
> > >   (apply map (fn [& column] column) matrix))
>
> > > (defn matrixMultiply [matrixA matrixB]
> > >   (map
> > >     (fn [row] (apply map (fn [& column] (apply + (map * row column)))
> > > matrixB))
> > >     matrixA))
>
> > > (defn matrixAdd [matrixA matrixB]
> > >   (if (and (not (empty? matrixA)) (not (empty? matrixB)))
> > >       (conj
> > >         (matrixAdd (rest matrixA) (rest matrixB))
> > >         (map + (first matrixA) (first matrixB)))))
>
> > > (defn matrixMultiplyScalar [matrixA scalar]
> > >   (if (not (empty? matrixA))
> > >       (conj
> > >         (matrixMultiplyScalar (rest matrixA) scalar)
> > >         (map (fn [arg] (* arg scalar)) (first matrixA)))))
>
> > > ;; Vector Functions
>
> > > (defn transposeVector [v]
> > >   (if (not (nil? v))
> > >       (transpose (vector v))))
>
> > > (defn vectorMultiplyScalar [v scalar]
> > >   (map * v (cycle [ scalar ])))
>
> > > ;; Binary Logic Input/Output
>
> > > (def infiniteInputCollection (cycle [[[-1 -1]] [[-1 1]] [[1 -1]] [[1
> > > 1]]]))
> > > (def infiniteAndOutputCollection (cycle [-1 -1 -1 1]))
>
> > > (defn buildInputs [numberOfInputs]
> > >   (loop [inputVector []
> > >          binaryInputCollection infiniteInputCollection
> > >          remainingCount numberOfInputs]
> > >         (if (> 0 remainingCount)
> > >             inputVector
> > >             (recur
> > >               (conj inputVector (first binaryInputCollection)) (rest
> > > binaryInputCollection) (dec remainingCount)))))
>
> > > (defn buildOutputs [numberOfOutputs outputCollection]
> > >   (loop [outputVector []
> > >          andOutputCollection outputCollection
> > >          remainingCount numberOfOutputs]
> > >         (if (> 0 remainingCount)
> > >             outputVector
> > >             (recur (conj outputVector (first andOutputCollection))
> > > (rest andOutputCollection) (dec remainingCount)))))
>
> > > ;; Main
>
> > > ;learning rate parameter eta
> > > (def learningRateParameter 0.5)
>
> > > ;the weight vector of the perceptron
> > > (def weightVector (ref nil))
>
> > > ;multiply the transpose of the weight vector with the input vector
> > > ;apply the signum function to the scalar result
> > > (defn computeActualResponse [signumFunction weights inputs]
> > >   (if (and (not (nil? weights)) (not (nil? inputs)))
> > >       (signumFunction (first (first (matrixMultiply (transpose
> > > weights) inputs))))))
>
> > > ;return an updated weight vector of the perceptron
> > > (defn getAdaptedWeightVector [weights inputs desiredResponse
> > > actualResponse]
> > >   (let [etaDeltaDesiredActual (* learningRateParameter (-
> > > desiredResponse actualResponse))]
> > >        (matrixAdd weights (matrixMultiplyScalar inputs
> > > etaDeltaDesiredActual))))
>
> > > ;train the perceptron with the inputs and corresponding known outputs
> > > (defn trainPerceptron [beginningWeightVector allInputs allOutputs]
> > >   (loop [weightVector beginningWeightVector
> > >          inputs allInputs
> > >          responses allOutputs]
> > >         (if (and (not (empty? inputs)) (not (empty? responses)))
> > >             (let [adaptedWeightVector
> > >                   (getAdaptedWeightVector
> > >                     weightVector
> > >                     (first inputs)
> > >                     (first responses)
> > >                     (computeActualResponse signum weightVector (first
> > > inputs)))]
> > >                  (recur adaptedWeightVector (rest inputs) (rest
> > > responses)))
> > >             weightVector)))
>
> > > (defn main [sizeOfDataSet]
> > >   (let [weights [[0 0]]
> > >         inputs (buildInputs sizeOfDataSet)
> > >         outputs (buildOutputs sizeOfDataSet
> > > infiniteAndOutputCollection)]
> > >        (trainPerceptron weights inputs outputs)))
>
> > > On Apr 20, 6:32 am, David Nolen <dnolen.li...@gmail.com> wrote:> You have 
> > > two other function calls
> > > > getAdaptedWeightVector
> > > > computeActualResponse
>
> > > > Are these recursive as well?
>
> > > > On Sun, Apr 19, 2009 at 11:26 PM, jleehurt <jleeh...@gmail.com> wrote:
>
> > > > > Hi all, I have the following code that trains a perceptron with the
> > > > > given inputs and corresponding desired inputs. For input/output
> > > > > vectors, when the size gets to about 2000, I am getting a
> > > > > java.lang.StackOverflowError in the following function:
>
> > > > > (defn trainPerceptron [beginningWeightVector allInputs allOutputs]
> > > > >  (loop [weightVector beginningWeightVector
> > > > >         inputs allInputs
> > > > >         responses allOutputs]
> > > > >        (if (and (not (empty? inputs)) (not (empty? responses)))
> > > > >            (let [adaptedWeightVector
> > > > >                  (getAdaptedWeightVector
> > > > >                    weightVector
> > > > >                    (first inputs)
> > > > >                    (first responses)
> > > > >                    (computeActualResponse signum weightVector (first
> > > > > inputs)))]
> > > > >                 (recur adaptedWeightVector (rest inputs) (rest
> > > > > responses)))
> > > > >            weightVector)))
>
> > > > > Is not the purpose of loop/recur to avoid stack overflow problems?
> > > > > What am I doing wrong?
--~--~---------~--~----~------------~-------~--~----~
You received this message because you are subscribed to the Google Groups 
"Clojure" group.
To post to this group, send email to clojure@googlegroups.com
To unsubscribe from this group, send email to 
clojure+unsubscr...@googlegroups.com
For more options, visit this group at 
http://groups.google.com/group/clojure?hl=en
-~----------~----~----~----~------~----~------~--~---

Reply via email to