Revision 271

View differences:

org.gvsig.toolbox/tags/org.gvsig.toolbox-1.0.43/org.gvsig.toolbox.math/build.xml

<project name="SEXTANTE_LIB" default="generate-without-source" basedir=".">
    <description>
        SEXTANTE_LIB
    </description>
    <property name="version.number" value="0.7"/>

  <target name="generate-without-source"
    description="generate the distribution without the source file">

       <tstamp>
         <format property="TODAY" pattern="yyyy-MM-dd HH:mm:ss" />
       </tstamp>

    <manifest file="MANIFEST.MF">
        <attribute name="Implementation-Version"
            value="${version.number}"/>
        <attribute name="Built-Date" value="${TODAY}"/>
    </manifest>

    <jar jarfile="../dist/libMath.jar" manifest="MANIFEST.MF">
        <fileset dir="bin">
            <include name="**"/>
        </fileset>
    </jar>

    <copy todir="../dist">
      <fileset dir="lib" includes="**"/>
    </copy>

  </target>

</project>

  
org.gvsig.toolbox/tags/org.gvsig.toolbox-1.0.43/org.gvsig.toolbox.math/MANIFEST.MF

Manifest-Version: 1.0
Ant-Version: Apache Ant 1.7.0
Created-By: 16.3-b01 (Sun Microsystems Inc.)
Implementation-Version: 0.7
Built-Date: 2011-12-14 14:01:08

  
org.gvsig.toolbox/tags/org.gvsig.toolbox-1.0.43/org.gvsig.toolbox.math/src/main/java/es/unex/sextante/math/regression/MultipleRegression.java

package es.unex.sextante.math.regression;

import java.util.ArrayList;

import Jama.Matrix;


/**
 * A class for performing multiple regressions
 *
 * @author volaya
 *
 */
public class MultipleRegression {

   private class MaxValues {

      public int    iMax;

      public double dRMax;

   }

   private final ArrayList m_Y = new ArrayList();

   private final ArrayList m_X[];

   private final double    m_dDCoeff[];

   private final double    m_dRCoeff[];

   private final int       m_iOrder[];


   public MultipleRegression(final int iVariables) {

      m_X = new ArrayList[iVariables];
      for (int i = 0; i < m_X.length; i++) {
         m_X[i] = new ArrayList();
      }
      m_dRCoeff = new double[iVariables + 1];
      m_dDCoeff = new double[iVariables + 1];
      m_iOrder = new int[iVariables + 1];

   }


   public void addValue(final double dX[],
                        final double dY) {

      if (dX.length == m_X.length) {
         m_Y.add(new Double(dY));
         for (int i = 0; i < dX.length; i++) {
            m_X[i].add(new Double(dX[i]));
         }
      }

   }


   public boolean calculate() {

      if ((m_Y.size() > m_iOrder.length - 1) && (m_iOrder.length > 0)) {
         getRegression();
         getCorrelation();
         return true;
      }

      return false;

   }


   private boolean eliminate(final double[] X,
                             final double[] Y) {

      final Regression regression = new Regression();

      if (regression.calculate(X, Y)) {
         for (int i = 0; i < Y.length; i++) {
            Y[i] -= regression.getConstant() + regression.getCoeff() * X[i];
         }

         return true;
      }

      return false;

   }


   public double getCoeff(int iVariable) {

      if ((++iVariable > 0) && (iVariable < m_dDCoeff.length)) {
         return (m_dRCoeff[iVariable]);
      }

      return 0.0;

   }


   public double getConstant() {

      if (m_dRCoeff.length > 1) {
         return (m_dRCoeff[0]);
      }

      return (0.0);
   }


   boolean getCorrelation() {

      int i, k, nVariables, nValues;
      double Values[][], r2_sum;
      final MaxValues maxValues = new MaxValues();

      nVariables = m_iOrder.length;
      nValues = m_Y.size();

      if ((nValues >= nVariables) && (nVariables > 1)) {
         Values = new double[nVariables][nValues];

         for (k = 0; k < nValues; k++) {
            Values[0][k] = ((Double) m_Y.get(k)).doubleValue();
         }

         for (i = 1; i < nVariables; i++) {
            for (k = 0; k < nValues; k++) {
               Values[i][k] = ((Double) m_X[i - 1].get(k)).doubleValue();
            }
         }

         m_iOrder[0] = -1;
         m_dDCoeff[0] = -1;

         for (i = 0, r2_sum = 0.0; i < nVariables - 1; i++) {
            getCorrelation(Values, Values[0], maxValues);

            r2_sum += (1.0 - r2_sum) * maxValues.dRMax;

            m_iOrder[maxValues.iMax] = i;
            m_dDCoeff[maxValues.iMax] = r2_sum;
         }

         return true;
      }

      return false;
   }


   boolean getCorrelation(final double[][] X,
                          final double[] Y,
                          final MaxValues maxValues) {

      int i, n;
      double XMax[];
      final Regression regression = new Regression();

      for (i = 1, n = 0, maxValues.iMax = -1, maxValues.dRMax = 0.0; i < X.length; i++) {
         if ((X[i] != null) && regression.calculate(X[i], Y)) {
            n++;
            if ((maxValues.iMax < 0) || (maxValues.dRMax < regression.getR2())) {
               maxValues.iMax = i;
               maxValues.dRMax = regression.getR2();
            }
         }
      }

      if (n > 1) {
         XMax = X[maxValues.iMax];
         X[maxValues.iMax] = null;

         for (i = 0; i < X.length; i++) {
            if (X[i] != null) {
               eliminate(XMax, X[i]);
            }
         }

         eliminate(XMax, Y);
      }

      return (maxValues.iMax >= 1);
   }


   public int getOrder(int iVariable) {

      if ((++iVariable > 0) && (iVariable < m_iOrder.length)) {
         return (m_iOrder[iVariable]);
      }

      return -1;
   }


   public int getOrdered(final int iOrder) {

      for (int i = 0; i < m_iOrder.length; i++) {
         if (iOrder == m_iOrder[i]) {
            return i - 1;
         }
      }

      return -1;
   }


   public double getR2(int iVariable) {

      if ((++iVariable > 0) && (iVariable < m_dDCoeff.length)) {
         return (m_dDCoeff[iVariable]);
      }

      return 0.0;
   }


   public double getR2Change(final int iVariable) {

      final int iOrder = getOrder(iVariable);

      if (iOrder > 0) {
         return (getR2(iVariable) - getR2(getOrdered(iOrder - 1)));
      }

      if (iOrder == 0) {
         return (getR2(iVariable));
      }

      return (0.0);
   }


   private boolean getRegression() {

      int i, j, k, nVariables, nValues;
      double sum, B[], P[][], X[][], Y[];

      nVariables = m_iOrder.length;
      nValues = m_Y.size();

      B = new double[nVariables];
      P = new double[nVariables][nVariables];

      Y = new double[nValues];
      X = new double[nVariables][nValues];

      for (k = 0; k < nValues; k++) {
         Y[k] = ((Double) m_Y.get(k)).doubleValue();
         X[0][k] = 1.0;
      }

      for (i = 1; i < nVariables; i++) {
         for (k = 0; k < nValues; k++) {
            X[i][k] = ((Double) m_X[i - 1].get(k)).doubleValue();
         }
      }

      for (i = 0; i < nVariables; i++) {
         for (k = 0, sum = 0.0; k < nValues; k++) {
            sum += X[i][k] * Y[k];
         }

         B[i] = sum;

         for (j = 0; j < nVariables; j++) {
            for (k = 0, sum = 0.0; k < nValues; k++) {
               sum += X[i][k] * X[j][k];
            }
            P[i][j] = sum;
         }
      }

      final Matrix m = new Matrix(P);
      Matrix inverse;
      try {
         inverse = m.inverse();
      }
      catch (final Exception e) {
         return false;
      }

      for (i = 0; i < nVariables; i++) {
         for (j = 0, sum = 0.0; j < nVariables; j++) {
            sum += inverse.get(i, j) * B[j];
         }
         m_dRCoeff[i] = sum;
      }

      return true;

   }

}
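
For orientation only (not part of revision 271), a minimal usage sketch of the MultipleRegression class above; the wrapper class name and the sample values are invented for illustration, and the call assumes two predictor variables:

// Hypothetical usage sketch, not part of the repository code.
import es.unex.sextante.math.regression.MultipleRegression;

public class MultipleRegressionExample {

   public static void main(final String[] args) {

      // Two predictor variables: one constant plus coefficients 0..1
      final MultipleRegression regression = new MultipleRegression(2);

      // Illustrative sample points (y depends on both predictors)
      regression.addValue(new double[] { 1.0, 2.0 }, 5.1);
      regression.addValue(new double[] { 2.0, 1.0 }, 4.2);
      regression.addValue(new double[] { 3.0, 4.0 }, 10.9);
      regression.addValue(new double[] { 4.0, 3.0 }, 9.8);
      regression.addValue(new double[] { 5.0, 5.0 }, 14.6);

      if (regression.calculate()) {
         System.out.println("constant = " + regression.getConstant());
         for (int i = 0; i < 2; i++) {
            System.out.println("coeff[" + i + "] = " + regression.getCoeff(i));
         }
      }
   }
}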

  
org.gvsig.toolbox/tags/org.gvsig.toolbox-1.0.43/org.gvsig.toolbox.math/src/main/java/es/unex/sextante/math/regression/LeastSquaresFit.java

package es.unex.sextante.math.regression;

import java.text.DecimalFormat;
import java.util.ArrayList;

import Jama.Matrix;
import Jama.QRDecomposition;

/*import es.unex.sextante.libMath.Jama.Matrix;
import es.unex.sextante.libMath.Jama.QRDecomposition;*/


/**
 * Least squares fitting
 *
 * @author volaya
 *
 */
public class LeastSquaresFit {

   private final ArrayList m_X     = new ArrayList();

   private final ArrayList m_Y     = new ArrayList();

   private double[]        m_dCoeffs;

   private String          m_sExpression;

   private double          m_dYMin = Double.MAX_VALUE;

   private double          m_dXMax = Double.NEGATIVE_INFINITY;

   private double          m_dXMin = Double.MAX_VALUE;

   private double          m_dYMax = Double.NEGATIVE_INFINITY;


   public void addValue(final double dX,
                        final double dY) {

      setMinMaxX(dX);
      setMinMaxY(dY);

      m_X.add(new Double(dX));
      m_Y.add(new Double(dY));

   }


   public boolean calculate(final int iOrder) {

      double dX, dY;
      final int nk = iOrder + 1;

      final double[][] alpha = new double[nk][nk];
      final double[] beta = new double[nk];
      double term = 0;

      m_dCoeffs = new double[nk];

      final int iNumPoints = m_X.size();

      for (int k = 0; k < nk; k++) {

         for (int j = k; j < nk; j++) {
            term = 0.0;
            alpha[k][j] = 0.0;
            for (int i = 0; i < iNumPoints; i++) {

               double prod1 = 1.0;
               dX = ((Double) m_X.get(i)).doubleValue();
               if (k > 0) {
                  for (int m = 0; m < k; m++) {
                     prod1 *= dX;
                  }
               }
               double prod2 = 1.0;
               if (j > 0) {
                  for (int m = 0; m < j; m++) {
                     prod2 *= dX;
                  }
               }

               term = (prod1 * prod2);

               alpha[k][j] += term;
            }
            alpha[j][k] = alpha[k][j];
         }

         for (int i = 0; i < iNumPoints; i++) {
            dX = ((Double) m_X.get(i)).doubleValue();
            dY = ((Double) m_Y.get(i)).doubleValue();
            double prod1 = 1.0;
            if (k > 0) {
               for (int m = 0; m < k; m++) {
                  prod1 *= dX;
               }
            }
            term = (dY * prod1);

            beta[k] += term;
         }
      }

      final Matrix alpha_matrix = new Matrix(alpha);
      final QRDecomposition alpha_QRD = new QRDecomposition(alpha_matrix);
      final Matrix beta_matrix = new Matrix(beta, nk);
      Matrix param_matrix;
      try {
         param_matrix = alpha_QRD.solve(beta_matrix);
      }
      catch (final Exception e) {
         return false;
      }

      final DecimalFormat df = new DecimalFormat("####.#####");
      final StringBuffer sb = new StringBuffer("");
      for (int k = 0; k < nk; k++) {
         m_dCoeffs[k] = param_matrix.get(k, 0);
         if (k != 0) {
            sb.append(" + " + df.format(m_dCoeffs[k]) + "x^" + Integer.toString(k));
         }
         else {
            sb.append(df.format(m_dCoeffs[k]));
         }
      }

      m_sExpression = sb.toString();

      return true;

   }


   public String getExpression() {

      return m_sExpression;

   }


   public int getNumPoints() {

      return m_X.size();
   }


   public void getPoints(final double[] x,
                         final double[] y) {

      int i;

      for (i = 0; i < m_X.size(); i++) {
         x[i] = ((Double) m_X.get(i)).doubleValue();
         y[i] = ((Double) m_Y.get(i)).doubleValue();
      }

   }


   public double getXMax() {

      return m_dXMax;

   }


   public double getXMin() {

      return m_dXMin;

   }


   public double getY(final double x) {

      int i;
      double dRet = 0;

      for (i = 0; i < m_dCoeffs.length; i++) {
         dRet += m_dCoeffs[i] * Math.pow(x, i);
      }

      return dRet;

   }


   public double getYMax() {

      return m_dYMax;

   }


   public double getYMin() {

      return m_dYMin;

   }


   private void setMinMaxX(final double x) {

      if (x > m_dXMax) {
         m_dXMax = x;
      }
      if (x < m_dXMin) {
         m_dXMin = x;
      }

   }


   private void setMinMaxY(final double y) {

      if (y > m_dYMax) {
         m_dYMax = y;
      }
      if (y < m_dYMin) {
         m_dYMin = y;
      }

   }
}
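
Likewise for orientation (not part of the revision), a minimal sketch of fitting a second-order polynomial with LeastSquaresFit; the wrapper class and data points are illustrative only:

// Hypothetical usage sketch, not part of the repository code.
import es.unex.sextante.math.regression.LeastSquaresFit;

public class LeastSquaresFitExample {

   public static void main(final String[] args) {

      final LeastSquaresFit fit = new LeastSquaresFit();

      // Illustrative points exactly on y = 1 + 2x + x^2
      for (double x = 0; x <= 5; x++) {
         fit.addValue(x, 1 + 2 * x + x * x);
      }

      // Fit a second-order polynomial
      if (fit.calculate(2)) {
         System.out.println(fit.getExpression());
         System.out.println("y(2.5) = " + fit.getY(2.5));
         System.out.println("points: " + fit.getNumPoints());
      }
   }
}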

  
org.gvsig.toolbox/tags/org.gvsig.toolbox-1.0.43/org.gvsig.toolbox.math/src/main/java/es/unex/sextante/math/regression/Regression.java

/*******************************************************************************
Regression.java
Copyright (C) Victor Olaya

Adapted from SAGA, System for Automated Geographical Analysis.
Copyrights (c) 2002-2005 by Olaf Conrad
Portions (c) 2002 by Andre Ringeler
Portions (c) 2005 by Victor Olaya

This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.

This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
 *******************************************************************************/

package es.unex.sextante.math.regression;

import java.text.DecimalFormat;
import java.util.ArrayList;

/**
 * Simple regression class
 *
 * @author volaya
 *
 */
public class Regression {

   public static final int     REGRESSION_Best_Fit = 0;

   public static final int     REGRESSION_Linear   = 1;              // Y = a + b * X

   public static final int     REGRESSION_Rez_X    = 2;              // Y = a + b / X

   public static final int     REGRESSION_Rez_Y    = 3;              // Y = a / (b - X)

   public static final int     REGRESSION_Pow      = 4;              // Y = a * X^b

   public static final int     REGRESSION_Exp      = 5;              // Y = a * e^(b * X)

   public static final int     REGRESSION_Log      = 6;              // Y = a + b * ln(X)

   private static final double ALMOST_ZERO         = 0.0001;

   private final ArrayList     m_X                 = new ArrayList();

   private final ArrayList     m_Y                 = new ArrayList();

   private double              m_dR;

   private double              m_dCoeff;

   private double              m_dConst;

   private double              m_dXMin, m_dXMax, m_dYMin, m_dYMax;

   private double              m_dXMean, m_dYMean;

   private double              m_dXVar, m_dYVar;

   private int                 m_iType;


   public Regression() {

   }


   private double _X_Transform(double x) {

      switch (m_iType) {
         default:
            return (x);

         case REGRESSION_Rez_X:
            if (x == 0.0) {
               x = ALMOST_ZERO;
            }
            return (1.0 / x);

         case REGRESSION_Pow:
         case REGRESSION_Log:
            if (x <= 0.0) {
               x = ALMOST_ZERO;
            }
            return (Math.log(x));
      }
   }


   private double _Y_Transform(double y) {

      switch (m_iType) {
         default:
            return (y);

         case REGRESSION_Rez_Y:
            if (y == 0.0) {
               y = ALMOST_ZERO;
            }
            return (1.0 / y);

         case REGRESSION_Pow:
         case REGRESSION_Exp:
            if (y <= 0.0) {
               y = ALMOST_ZERO;
            }
            return (Math.log(y));
      }
   }


   public void addValue(final double dX,
                        final double dY) {

      m_X.add(new Double(dX));
      m_Y.add(new Double(dY));

   }


   public boolean calculate() {

      return calculateLinear();

   }


   public boolean calculate(final double[] x,
                            final double[] y) {

      if (x.length == y.length) {
         m_X.clear();
         m_Y.clear();
         for (int i = 0; i < y.length; i++) {
            this.addValue(x[i], y[i]);
         }
         return calculate();
      }
      else {
         return false;
      }

   }


   public boolean calculate(final int iRegressionType) {

      double d;

      if (iRegressionType == REGRESSION_Best_Fit) {
         m_iType = getBestFitType();
      }
      else {
         m_iType = iRegressionType;
      }

      if (calculateLinear()) {

         switch (m_iType) {

            case REGRESSION_Linear:
            default:
               break;

            case REGRESSION_Rez_X:
               m_dXVar = 1.0 / m_dXVar;
               break;

            case REGRESSION_Rez_Y:
               d = m_dConst;
               m_dConst = 1.0 / m_dCoeff;
               m_dCoeff = d * m_dCoeff;
               m_dYVar = 1.0 / m_dYVar;
               break;

            case REGRESSION_Pow:
               m_dConst = Math.exp(m_dConst);
               m_dXVar = Math.exp(m_dXVar);
               m_dYVar = Math.exp(m_dYVar);
               break;

            case REGRESSION_Exp:
               m_dConst = Math.exp(m_dConst);
               m_dYVar = Math.exp(m_dYVar);
               break;

            case REGRESSION_Log:
               m_dXVar = Math.exp(m_dXVar);
               break;
         }

         if (m_iType != REGRESSION_Linear) {
            calculateMinMaxMean();
         }

         return true;
      }

      return false;
   }


   private boolean calculateLinear() {

      int i;
      double x, y, s_xx, s_xy, s_x, s_y, s_dx2, s_dy2, s_dxdy;

      if (m_X.size() > 1) {

         m_dXMean = m_dXMin = m_dXMax = _X_Transform(((Double) m_X.get(0)).doubleValue());
         m_dYMean = m_dYMin = m_dYMax = _Y_Transform(((Double) m_Y.get(0)).doubleValue());

         for (i = 1; i < m_X.size(); i++) {
            m_dXMean += (x = _X_Transform(((Double) m_X.get(i)).doubleValue()));
            m_dYMean += (y = _Y_Transform(((Double) m_Y.get(i)).doubleValue()));
            setMinMaxX(x);
            setMinMaxY(y);
         }

         m_dXMean /= m_X.size();
         m_dYMean /= m_X.size();

         if (m_dXMin < m_dXMax && m_dYMin < m_dYMax) {

            s_x = s_y = s_xx = s_xy = s_dx2 = s_dy2 = s_dxdy = 0.0;

            for (i = 0; i < m_X.size(); i++) {

               x = _X_Transform(((Double) m_X.get(i)).doubleValue());
               y = _Y_Transform(((Double) m_Y.get(i)).doubleValue());

               s_x += x;
               s_y += y;
               s_xx += x * x;
               s_xy += x * y;

               x -= m_dXMean;
               y -= m_dYMean;

               s_dx2 += x * x;
               s_dy2 += y * y;
               s_dxdy += x * y;
            }

            m_dXVar = s_dx2 / m_X.size();
            m_dYVar = s_dy2 / m_X.size();

            m_dCoeff = s_dxdy / s_dx2;
            if (m_X.size() * s_xx - s_x * s_x != 0) {
               m_dConst = (s_xx * s_y - s_x * s_xy) / (m_X.size() * s_xx - s_x * s_x);
            }
            else {
               m_dConst = 0;
            }
            m_dR = s_dxdy / Math.sqrt(s_dx2 * s_dy2);

            return true;
         }
      }

      return false;

   }


   private void calculateMinMaxMean() {

      int i;
      double x, y;

      if (m_X.size() > 1) {
         m_dXMean = m_dXMin = m_dXMax = _X_Transform(((Double) m_X.get(0)).doubleValue());
         m_dYMean = m_dYMin = m_dYMax = _Y_Transform(((Double) m_Y.get(0)).doubleValue());
         for (i = 1; i < m_X.size(); i++) {
            m_dXMean += (x = ((Double) m_X.get(i)).doubleValue());
            m_dYMean += (y = ((Double) m_Y.get(i)).doubleValue());
            setMinMaxX(x);
            setMinMaxY(y);
         }
         m_dXMean /= m_X.size();
         m_dYMean /= m_X.size();
      }

   }


   private int getBestFitType() {

      int i;
      int iType = 1;
      final int iTypes = 6;
      double m_dBestR = 0;

      m_dR = 0;

      for (i = 1; i < iTypes + 1; i++) {
         calculate(i);
         if (m_dR > m_dBestR) {
            iType = i;
            m_dBestR = m_dR;
         }
      }

      return iType;

   }


   public double getCoeff() {

      return m_dCoeff;

   }


   public double getConstant() {

      return m_dConst;

   }


   public String getExpression() {

      final DecimalFormat df = new DecimalFormat("####.####");

      switch (m_iType) {
         case REGRESSION_Linear:
            return " y = " + df.format(m_dConst) + " + " + df.format(m_dCoeff) + "x";

         case REGRESSION_Rez_X:
            return " y = " + df.format(m_dConst) + " + " + df.format(m_dCoeff) + "/x";

         case REGRESSION_Rez_Y:
            return " y = " + df.format(m_dConst) + " / (" + df.format(m_dCoeff) + "- x)";

         case REGRESSION_Pow:
            return " y = " + df.format(m_dConst) + "x^" + df.format(m_dCoeff);

         case REGRESSION_Exp:
            return " y = " + df.format(m_dConst) + " · e^(" + df.format(m_dCoeff) + "x)";

         case REGRESSION_Log:
            return " y = " + df.format(m_dConst) + " + " + df.format(m_dCoeff) + " · ln(x)";
      }

      return "";

   }


   public double getR() {

      return m_dR;

   }


   public double getR2() {

      return m_dR * m_dR;

   }


   public void getRestrictedSample(final double[] x,
                                   final double[] y,
                                   final int nPoints) {

      int i;
      int iIndex;

      for (i = 0; i < nPoints; i++) {
         iIndex = (int) (Math.random() * m_X.size());
         x[i] = ((Double) m_X.get(iIndex)).doubleValue();
         y[i] = ((Double) m_Y.get(iIndex)).doubleValue();
      }

   }


   public double getX(double y) {

      if (m_X.size() > 0.0) {
         switch (m_iType) {
            case REGRESSION_Linear: // Y = a + b * X -> X = (Y - a) / b
               if (m_dCoeff != 0.0) {
                  return ((m_dConst * y) / m_dCoeff);
               }

            case REGRESSION_Rez_X: // Y = a + b / X -> X = b / (Y - a)
               if ((y = y - m_dConst) != 0.0) {
                  return (m_dCoeff / y);
               }

            case REGRESSION_Rez_Y: // Y = a / (b - X) -> X = b - a / Y
               if (y != 0.0) {
                  return (m_dCoeff - m_dConst / y);
               }

            case REGRESSION_Pow: // Y = a * X^b -> X = (Y / a)^(1 / b)
               if (m_dConst != 0.0 && m_dCoeff != 0.0) {
                  return (Math.pow(y / m_dConst, 1.0 / m_dCoeff));
               }

            case REGRESSION_Exp: // Y = a * e^(b * X) -> X = ln(Y / a) / b
               if (m_dConst != 0.0 && (y = y / m_dConst) > 0.0 && m_dCoeff != 0.0) {
                  return (Math.log(y) / m_dCoeff);
               }

            case REGRESSION_Log: // Y = a + b * ln(X) -> X = e^((Y - a) / b)
               if (m_dCoeff != 0.0) {
                  return (Math.exp((y - m_dConst) / m_dCoeff));
               }
         }
      }

      return (Double.NEGATIVE_INFINITY);
   }


   public double getXMax() {

      return m_dXMax;

   }


   public double getXMin() {

      return m_dXMin;

   }


   public double getXVar() {

      return m_dXVar;

   }


   public double getY(double x) {

      if (m_X.size() > 0.0) {
         switch (m_iType) {
            case REGRESSION_Linear: // Y = a + b * X
               return (m_dConst + m_dCoeff * x);

            case REGRESSION_Rez_X: // Y = a + b / X
               if (x != 0.0) {
                  return (m_dConst + m_dCoeff / x);
               }

            case REGRESSION_Rez_Y: // Y = a / (b - X)
               if ((x = m_dCoeff - x) != 0.0) {
                  return (m_dConst / x);
               }

            case REGRESSION_Pow: // Y = a * X^b
               return (m_dConst * Math.pow(x, m_dCoeff));

            case REGRESSION_Exp: // Y = a e^(b * X)
               return (m_dConst * Math.exp(m_dCoeff * x));

            case REGRESSION_Log: // Y = a + b * ln(X)
               if (x > 0.0) {
                  return (m_dConst + m_dCoeff * Math.log(x));
               }
         }
      }

      return (Double.NEGATIVE_INFINITY);
   }


   public double getYMax() {

      return m_dYMax;

   }


   public double getYMin() {

      return m_dYMin;

   }


   public double getYVar() {

      return m_dYVar;

   }


   private void setMinMaxX(final double x) {

      if (x > m_dXMax) {
         m_dXMax = x;
      }
      if (x < m_dXMin) {
         m_dXMin = x;
      }

   }


   private void setMinMaxY(final double y) {

      if (y > m_dYMax) {
         m_dYMax = y;
      }
      if (y < m_dYMin) {
         m_dYMin = y;
      }

   }

}
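
Again for orientation (not part of the revision), a minimal sketch of the simple Regression class fitting a linear model to invented sample points; REGRESSION_Best_Fit could be passed instead to try all six model forms:

// Hypothetical usage sketch, not part of the repository code.
import es.unex.sextante.math.regression.Regression;

public class RegressionExample {

   public static void main(final String[] args) {

      final Regression regression = new Regression();

      // Illustrative points close to y = 2 + 0.5x
      regression.addValue(1.0, 2.4);
      regression.addValue(2.0, 3.1);
      regression.addValue(3.0, 3.4);
      regression.addValue(4.0, 4.1);
      regression.addValue(5.0, 4.4);

      // Fit a simple linear model
      if (regression.calculate(Regression.REGRESSION_Linear)) {
         System.out.println(regression.getExpression());
         System.out.println("R2   = " + regression.getR2());
         System.out.println("y(6) = " + regression.getY(6.0));
      }
   }
}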

  
org.gvsig.toolbox/tags/org.gvsig.toolbox-1.0.43/org.gvsig.toolbox.math/src/main/java/es/unex/sextante/math/simpleStats/SimpleStats.java

package es.unex.sextante.math.simpleStats;


/**
 * A class to calculate some basic statistics
 *
 * @author volaya
 *
 */
public class SimpleStats {

   //private final ArrayList m_Values;
   private double m_dSum;
   private double m_dRMS;
   private double m_dMean;
   private double m_dVariance;
   private double m_dMax, m_dMin;
   private double m_dStdDev;
   //private boolean   m_bCalculated;
   private int    m_iCount;
   private double m_dM2;


   public SimpleStats() {

      //m_Values = new ArrayList();
      //m_bCalculated = false;
      m_iCount = 0;
      m_dMax = Double.NEGATIVE_INFINITY;
      m_dMin = Double.MAX_VALUE;
      m_dSum = 0;
      m_dRMS = 0;
      m_dM2 = 0;

   }


   /**
    * adds a new value to the list of values
    *
    * @param dValue
    *                the value
    */
   public void addValue(final double dValue) {

      m_iCount++;

      final double dDelta = dValue - m_dMean;
      m_dMean = m_dMean + dDelta / m_iCount;
      m_dM2 = m_dM2 + dDelta * (dValue - m_dMean);
      m_dVariance = m_dM2 / (m_iCount - 1);

      m_dSum += dValue;
      m_dRMS += dValue * dValue;
      m_dMax = Math.max(m_dMax, dValue);
      m_dMin = Math.min(m_dMin, dValue);

      m_dStdDev = Math.sqrt(m_dVariance);
      m_dRMS = Math.sqrt(m_dRMS / m_iCount);

   }


   /**
    * Returns the coefficient of variation
    *
    * @return the coefficient of variation
    */
   public double getCoeffOfVar() {

      return m_dVariance / m_dMean;

   }


   /**
    * Return the number of values added
    *
    * @return the number of values added
    */
   public int getCount() {

      return m_iCount;

   }


   /**
    * Returns the maximum value
    *
    * @return the maximum value
    */
   public double getMax() {

      return m_dMax;

   }


   /**
    * Returns the mean value
    *
    * @return the mean value
    */
   public double getMean() {

      return m_dMean;

   }


   /**
    * Returns the minimum value
    *
    * @return the minimum value
    */
   public double getMin() {

      return m_dMin;

   }


   /**
    * Returns the Root Mean Squared
    *
    * @return the Root Mean Squared
    */
   public double getRMS() {

      return m_dRMS;

   }


   /**
    * Returns the standard deviation
    *
    * @return the standard deviation
    */
   public double getStdDev() {

      return m_dStdDev;

   }


   /**
    * Returns the sum of all values
    *
    * @return the sum of all values
    */
   public double getSum() {

      return m_dSum;

   }


   /**
    * Returns the variance
    *
    * @return the variance
    */
   public double getVariance() {

      return m_dVariance;

   }

}
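
For orientation (not part of the revision), a minimal sketch of the running-statistics usage of SimpleStats with invented values:

// Hypothetical usage sketch, not part of the repository code.
import es.unex.sextante.math.simpleStats.SimpleStats;

public class SimpleStatsExample {

   public static void main(final String[] args) {

      final SimpleStats stats = new SimpleStats();

      // Values are accumulated incrementally; nothing is stored per value
      for (final double value : new double[] { 2.0, 4.0, 4.0, 4.0, 5.0, 5.0, 7.0, 9.0 }) {
         stats.addValue(value);
      }

      System.out.println("count    = " + stats.getCount());
      System.out.println("mean     = " + stats.getMean());
      System.out.println("variance = " + stats.getVariance());
      System.out.println("std dev  = " + stats.getStdDev());
      System.out.println("min/max  = " + stats.getMin() + " / " + stats.getMax());
   }
}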

  
org.gvsig.toolbox/tags/org.gvsig.toolbox-1.0.43/org.gvsig.toolbox.math/src/main/java/es/unex/sextante/math/pdf/PDF.java

/*************************************************************
 *
 * A reduced version of Michael Thomas Flanagan's Stat Class
 * with PDFs and CDFs
 *
 * http://www.ee.ucl.ac.uk/~mflanaga/java/Stat.html
 *
 **************************************************************/

package es.unex.sextante.math.pdf;

/**
 * Statistical functions with PDFs and CDFs. Based on Michael Thomas Flanagan's Stat Class
 *
 * @author volaya
 *
 */
public class PDF {

   // GAMMA FUNCTIONS
   // Lanczos Gamma Function approximation - N (number of coefficients -1)
   private static int         lgfN     = 6;

   // Lanczos Gamma Function approximation - Coefficients
   private static double[]    lgfCoeff = { 1.000000000190015, 76.18009172947146, -86.50532032941677, 24.01409824083091,
            -1.231739572450155, 0.1208650973866179E-2, -0.5395239384953E-5 };

   // Lanczos Gamma Function approximation - small gamma
   private static double      lgfGamma = 5.0;

   // Maximum number of iterations allowed in Incomplete Gamma Function
   // calculations
   private static int         igfiter  = 1000;

   // Tolerance used in terminating series in Incomplete Gamma Function
   // calculations
   private static double      igfeps   = 1e-8;

   public static final double FPMIN    = 1e-300;


   // BETA DISTRIBUTIONS AND FUNCTIONS
   // beta distribution cdf
   public static double betaCDF(final double alpha,
                                final double beta,
                                final double limit) {

      return betaCDF(0.0D, 1.0D, alpha, beta, limit);
   }


   // beta distribution pdf
   public static double betaCDF(final double min,
                                final double max,
                                final double alpha,
                                final double beta,
                                final double limit) {

      if (alpha <= 0.0D) {
         throw new IllegalArgumentException("The shape parameter, alpha, " + alpha + "must be greater than zero");
      }
      if (beta <= 0.0D) {
         throw new IllegalArgumentException("The shape parameter, beta, " + beta + "must be greater than zero");
      }
      if (limit < min) {
         throw new IllegalArgumentException("limit, " + limit + ", must be greater than or equal to the minimum value, " + min);
      }
      if (limit > max) {
         throw new IllegalArgumentException("limit, " + limit + ", must be less than or equal to the maximum value, " + max);
      }
      return PDF.regularisedBetaFunction(alpha, beta, (limit - min) / (max - min));
   }


   // Beta function
   public static double betaFunction(final double z,
                                     final double w) {

      return Math.exp(logGamma(z) + logGamma(w) - logGamma(z + w));
   }


   // beta distribution pdf
   public static double betaPDF(final double alpha,
                                final double beta,
                                final double x) {

      return betaPDF(0.0D, 1.0D, alpha, beta, x);
   }


   // beta distribution pdf
   public static double betaPDF(final double min,
                                final double max,
                                final double alpha,
                                final double beta,
                                final double x) {

      if (alpha <= 0.0D) {
         throw new IllegalArgumentException("The shape parameter, alpha, " + alpha + "must be greater than zero");
      }
      if (beta <= 0.0D) {
         throw new IllegalArgumentException("The shape parameter, beta, " + beta + "must be greater than zero");
      }
      if (x < min) {
         throw new IllegalArgumentException("x, " + x + ", must be greater than or equal to the minimum value, " + min);
      }
      if (x > max) {
         throw new IllegalArgumentException("x, " + x + ", must be less than or equal to the maximum value, " + max);
      }
      final double pdf = Math.pow(x - min, alpha - 1) * Math.pow(max - x, beta - 1) / Math.pow(max - min, alpha + beta - 1);
      return pdf / PDF.betaFunction(alpha, beta);
   }


   // Returns a binomial mass probabilty function
   public static double binomial(final double p,
                                 final int n,
                                 final int k) {

      if (k < 0 || n < 0) {
         throw new IllegalArgumentException("\nn and k must be greater than or equal to zero");
      }
      if (k > n) {
         throw new IllegalArgumentException("\nk is greater than n");
      }
      return Math.floor(0.5D + Math.exp(PDF.logFactorial(n) - PDF.logFactorial(k) - PDF.logFactorial(n - k))) * Math.pow(p, k)
             * Math.pow(1.0D - p, n - k);
   }


   // Returns the binomial cumulative probabilty
   public static double binomialCDF(final double p,
                                    final int n,
                                    final int k) {
... This diff was truncated because it exceeds the maximum size that can be displayed.
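
For orientation (not part of the revision), a minimal sketch calling the beta and binomial helpers of PDF shown above; they rely on gamma and factorial helpers defined in the truncated remainder of the class, and the argument values are illustrative only:

// Hypothetical usage sketch, not part of the repository code.
import es.unex.sextante.math.pdf.PDF;

public class PDFExample {

   public static void main(final String[] args) {

      // Density of a Beta(2, 5) distribution at x = 0.3 on the default [0, 1] range
      System.out.println("betaPDF  = " + PDF.betaPDF(2.0, 5.0, 0.3));

      // P(X <= 0.3) for the same distribution
      System.out.println("betaCDF  = " + PDF.betaCDF(2.0, 5.0, 0.3));

      // Probability of exactly 3 successes in 10 trials with p = 0.5
      System.out.println("binomial = " + PDF.binomial(0.5, 10, 3));
   }
}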
