1   /*
2    * Licensed to the Apache Software Foundation (ASF) under one or more
3    * contributor license agreements.  See the NOTICE file distributed with
4    * this work for additional information regarding copyright ownership.
5    * The ASF licenses this file to You under the Apache License, Version 2.0
6    * (the "License"); you may not use this file except in compliance with
7    * the License.  You may obtain a copy of the License at
8    *
9    *      http://www.apache.org/licenses/LICENSE-2.0
10   *
11   * Unless required by applicable law or agreed to in writing, software
12   * distributed under the License is distributed on an "AS IS" BASIS,
13   * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14   * See the License for the specific language governing permissions and
15   * limitations under the License.
16   */
17  package org.apache.commons.math4.legacy.stat.descriptive.rank;
18  
19  import java.util.Arrays;
20  
21  import org.apache.commons.statistics.distribution.ContinuousDistribution;
22  import org.apache.commons.math4.legacy.distribution.AbstractRealDistribution;
23  import org.apache.commons.statistics.distribution.NormalDistribution;
24  import org.apache.commons.math4.legacy.exception.MathIllegalArgumentException;
25  import org.apache.commons.math4.legacy.exception.NotANumberException;
26  import org.apache.commons.math4.legacy.exception.NotPositiveException;
27  import org.apache.commons.math4.legacy.exception.NotStrictlyPositiveException;
28  import org.apache.commons.math4.legacy.exception.NullArgumentException;
29  import org.apache.commons.math4.legacy.exception.NumberIsTooLargeException;
30  import org.apache.commons.math4.legacy.exception.OutOfRangeException;
31  import org.apache.commons.rng.simple.RandomSource;
32  import org.apache.commons.math4.legacy.stat.descriptive.UnivariateStatistic;
33  import org.apache.commons.math4.legacy.stat.descriptive.UnivariateStatisticAbstractTest;
34  import org.apache.commons.math4.legacy.stat.ranking.NaNStrategy;
35  import org.junit.Assert;
36  import org.junit.Before;
37  import org.junit.Test;
38  
39  /**
40   * Test cases for the {@link Percentile} class.
41   */
42  public class PercentileTest extends UnivariateStatisticAbstractTest{
43  
44      protected Percentile stat;
45  
46      private double quantile;
47  
48      /**
49       * {@link org.apache.commons.math4.legacy.stat.descriptive.rank.Percentile.EstimationType Estimation type}
50       * to be used when calling {@link #getUnivariateStatistic()}.
51       */
52      private Percentile.EstimationType type;
53  
54      /**
55       * {@link NaNStrategy NaN handling strategy}
56       * to be used when calling {@link #getUnivariateStatistic()}.
57       */
58      private NaNStrategy nanStrategy;
59  
60      /**
61       * The {@link KthSelector} used for kth element selection.
62       */
63      private KthSelector kthSelector;
64  
65      /**
66       * A default percentile to be used for {@link #getUnivariateStatistic()}
67       */
68      protected final double DEFAULT_PERCENTILE = 95d;
69  
70      /**
71       * Set-up method that restores the default test configuration before each test.
72       */
73      @Before
74      public void before() {
75          quantile         = 95.0;
76          type             = Percentile.EstimationType.LEGACY;
77          nanStrategy      = NaNStrategy.REMOVED;
78          kthSelector      = new KthSelector(new MedianOf3PivotingStrategy());
79      }
80  
81      private void reset(final double p, final Percentile.EstimationType type) {
82          this.quantile = p;
83          this.type     = type;
84          nanStrategy   = (type == Percentile.EstimationType.LEGACY) ? NaNStrategy.FIXED : NaNStrategy.REMOVED;
85      }
86  
87      /**
88       * {@inheritDoc}
89       */
90      @Override
91      public Percentile getUnivariateStatistic() {
92          return new Percentile(quantile).
93                  withEstimationType(type).
94                  withNaNStrategy(nanStrategy).
95                  withKthSelector(kthSelector);
96      }
97  
98      /**
99       * {@inheritDoc}
100      */
101     @Override
102     public double expectedValue() {
103         return this.percentile95;
104     }
105 
106     @Test
107     public void testHighPercentile(){
108         final double[] d = new double[]{1, 2, 3};
109         final Percentile p = new Percentile(75);
110         Assert.assertEquals(3.0, p.evaluate(d), 1.0e-5);
111     }
112 
113     @Test
114     public void testLowPercentile() {
115         final double[] d = new double[] {0, 1};
116         final Percentile p = new Percentile(25);
117         Assert.assertEquals(0d, p.evaluate(d), Double.MIN_VALUE);
118     }
119 
120     @Test
121     public void testPercentile() {
122         final double[] d = new double[] {1, 3, 2, 4};
123         final Percentile p = new Percentile(30);
124         Assert.assertEquals(1.5, p.evaluate(d), 1.0e-5);
125         p.setQuantile(25);
126         Assert.assertEquals(1.25, p.evaluate(d), 1.0e-5);
127         p.setQuantile(75);
128         Assert.assertEquals(3.75, p.evaluate(d), 1.0e-5);
129         p.setQuantile(50);
130         Assert.assertEquals(2.5, p.evaluate(d), 1.0e-5);
131 
132         // invalid percentiles
133         try {
134             p.evaluate(d, 0, d.length, -1.0);
135             Assert.fail();
136         } catch (final MathIllegalArgumentException ex) {
137             // success
138         }
139         try {
140             p.evaluate(d, 0, d.length, 101.0);
141             Assert.fail();
142         } catch (final MathIllegalArgumentException ex) {
143             // success
144         }
145     }
146 
147     @Test
148     public void testNISTExample() {
149         final double[] d = new double[] {95.1772, 95.1567, 95.1937, 95.1959,
150                 95.1442, 95.0610,  95.1591, 95.1195, 95.1772, 95.0925, 95.1990, 95.1682
151         };
152         final Percentile p = new Percentile(90);
153         Assert.assertEquals(95.1981, p.evaluate(d), 1.0e-4);
154         Assert.assertEquals(95.1990, p.evaluate(d,0,d.length, 100d), 0);
155     }
156 
157     @Test
158     public void test5() {
159         final Percentile percentile = new Percentile(5);
160         Assert.assertEquals(this.percentile5, percentile.evaluate(testArray), getTolerance());
161     }
162 
163     @Test
164     public void testNullEmpty() {
165         final Percentile percentile = new Percentile(50);
166         final double[] nullArray = null;
167         final double[] emptyArray = new double[] {};
168         try {
169             percentile.evaluate(nullArray);
170             Assert.fail("Expecting NullArgumentException for null array");
171         } catch (final NullArgumentException ex) {
172             // expected
173         }
174         Assert.assertTrue(Double.isNaN(percentile.evaluate(emptyArray)));
175     }
176 
177     @Test
178     public void testSingleton() {
179         final Percentile percentile = new Percentile(50);
180         final double[] singletonArray = new double[] {1d};
181         Assert.assertEquals(1d, percentile.evaluate(singletonArray), 0);
182         Assert.assertEquals(1d, percentile.evaluate(singletonArray, 0, 1), 0);
183         Assert.assertEquals(1d, percentile.evaluate(singletonArray, 0, 1, 5), 0);
184         Assert.assertEquals(1d, percentile.evaluate(singletonArray, 0, 1, 100), 0);
185         Assert.assertTrue(Double.isNaN(percentile.evaluate(singletonArray, 0, 0)));
186     }
187 
188     @Test
189     public void testSpecialValues() {
190         final Percentile percentile = new Percentile(50);
191         double[] specialValues = new double[] {0d, 1d, 2d, 3d, 4d,  Double.NaN};
192         Assert.assertEquals(/*2.5d*/2d, percentile.evaluate(specialValues), 0);
193         specialValues =  new double[] {Double.NEGATIVE_INFINITY, 1d, 2d, 3d,
194                 Double.NaN, Double.POSITIVE_INFINITY};
195         Assert.assertEquals(/*2.5d*/2d, percentile.evaluate(specialValues), 0);
196         specialValues = new double[] {1d, 1d, Double.POSITIVE_INFINITY,
197                 Double.POSITIVE_INFINITY};
198         Assert.assertTrue(Double.isInfinite(percentile.evaluate(specialValues)));
199         specialValues = new double[] {1d, 1d, Double.NaN,
200                 Double.NaN};
201         Assert.assertFalse(Double.isNaN(percentile.evaluate(specialValues)));
202         Assert.assertEquals(1d, percentile.evaluate(specialValues), 0.0);
203         specialValues = new double[] {1d, 1d, Double.NEGATIVE_INFINITY,
204                 Double.NEGATIVE_INFINITY};
205         // Interpolation results in NEGATIVE_INFINITY + POSITIVE_INFINITY
206         Assert.assertTrue(Double.isNaN(percentile.evaluate(specialValues)));
207     }
208 
209     @Test
210     public void testSetQuantile() {
211         final Percentile percentile = new Percentile(10);
212         percentile.setQuantile(100); // OK
213         Assert.assertEquals(100, percentile.getQuantile(), 0);
214         try {
215             percentile.setQuantile(0);
216             Assert.fail("Expecting MathIllegalArgumentException");
217         } catch (final MathIllegalArgumentException ex) {
218             // expected
219         }
220         try {
221             new Percentile(0);
222             Assert.fail("Expecting MathIllegalArgumentException");
223         } catch (final MathIllegalArgumentException ex) {
224             // expected
225         }
226     }
227 
228     // The tests below are run for all estimation types.
229     /**
230      * While {@link #testHighPercentile()} checks only the default (LEGACY)
231      * implementation, this method verifies the result for every estimation type.
232      */
233     @Test
234     public void testAllTechniquesHighPercentile() {
235         final double[] d = new double[] { 1, 2, 3 };
236         testAssertMappedValues(d, new Object[][] { { Percentile.EstimationType.LEGACY, 3d }, { Percentile.EstimationType.R_1, 3d },
237                 { Percentile.EstimationType.R_2, 3d }, { Percentile.EstimationType.R_3, 2d }, { Percentile.EstimationType.R_4, 2.25 }, { Percentile.EstimationType.R_5, 2.75 },
238                 { Percentile.EstimationType.R_6, 3d }, { Percentile.EstimationType.R_7, 2.5 },{ Percentile.EstimationType.R_8, 2.83333 }, {Percentile.EstimationType.R_9,2.81250} },
239                 75d, 1.0e-5);
240     }
241 
242     @Test
243     public void testAllTechniquesLowPercentile() {
244         final double[] d = new double[] { 0, 1 };
245         testAssertMappedValues(d, new Object[][] { { Percentile.EstimationType.LEGACY, 0d }, { Percentile.EstimationType.R_1, 0d },
246                 { Percentile.EstimationType.R_2, 0d }, { Percentile.EstimationType.R_3, 0d }, { Percentile.EstimationType.R_4, 0d }, {Percentile.EstimationType.R_5, 0d}, {Percentile.EstimationType.R_6, 0d},
247                 { Percentile.EstimationType.R_7, 0.25 }, { Percentile.EstimationType.R_8, 0d }, {Percentile.EstimationType.R_9, 0d} },
248                 25d, Double.MIN_VALUE);
249     }
250 
251     public void checkAllTechniquesPercentile() {
252         final double[] d = new double[] { 1, 3, 2, 4 };
253 
254         testAssertMappedValues(d, new Object[][] { { Percentile.EstimationType.LEGACY, 1.5d },
255                 { Percentile.EstimationType.R_1, 2d }, { Percentile.EstimationType.R_2, 2d }, { Percentile.EstimationType.R_3, 1d }, { Percentile.EstimationType.R_4, 1.2 }, {Percentile.EstimationType.R_5, 1.7},
256                 { Percentile.EstimationType.R_6, 1.5 },{ Percentile.EstimationType.R_7, 1.9 }, { Percentile.EstimationType.R_8, 1.63333 },{ Percentile.EstimationType.R_9, 1.65 } },
257                 30d, 1.0e-05);
258 
259         testAssertMappedValues(d, new Object[][] { { Percentile.EstimationType.LEGACY, 1.25d },
260                 { Percentile.EstimationType.R_1, 1d }, { Percentile.EstimationType.R_2, 1.5d }, { Percentile.EstimationType.R_3, 1d }, { Percentile.EstimationType.R_4, 1d }, {Percentile.EstimationType.R_5, 1.5},
261                 { Percentile.EstimationType.R_6, 1.25 },{ Percentile.EstimationType.R_7, 1.75 },
262                 { Percentile.EstimationType.R_8, 1.41667 }, { Percentile.EstimationType.R_9, 1.43750 } }, 25d, 1.0e-05);
263 
264         testAssertMappedValues(d, new Object[][] { { Percentile.EstimationType.LEGACY, 3.75d },
265                 { Percentile.EstimationType.R_1, 3d }, { Percentile.EstimationType.R_2, 3.5d }, { Percentile.EstimationType.R_3, 3d }, { Percentile.EstimationType.R_4, 3d },
266                 { Percentile.EstimationType.R_5, 3.5d },{ Percentile.EstimationType.R_6, 3.75d }, { Percentile.EstimationType.R_7, 3.25 },
267                 { Percentile.EstimationType.R_8, 3.58333 },{ Percentile.EstimationType.R_9, 3.56250} }, 75d, 1.0e-05);
268 
269         testAssertMappedValues(d, new Object[][] { { Percentile.EstimationType.LEGACY, 2.5d },
270                 { Percentile.EstimationType.R_1, 2d }, { Percentile.EstimationType.R_2, 2.5d }, { Percentile.EstimationType.R_3, 2d }, { Percentile.EstimationType.R_4, 2d },
271                 { Percentile.EstimationType.R_5, 2.5 },{ Percentile.EstimationType.R_6, 2.5 },{ Percentile.EstimationType.R_7, 2.5 },
272                 { Percentile.EstimationType.R_8, 2.5 },{ Percentile.EstimationType.R_9, 2.5 } }, 50d, 1.0e-05);
273 
274         // invalid percentiles
275         for (final Percentile.EstimationType e : Percentile.EstimationType.values()) {
276             try {
277                 reset(-1.0, e);
278                 getUnivariateStatistic().evaluate(d, 0, d.length);
279                 Assert.fail();
280             } catch (final MathIllegalArgumentException ex) {
281                 // success
282             }
283         }
284 
285         for (final Percentile.EstimationType e : Percentile.EstimationType.values()) {
286             try {
287                 reset(101.0, e);
288                 getUnivariateStatistic().evaluate(d, 0, d.length);
289                 Assert.fail();
290             } catch (final MathIllegalArgumentException ex) {
291                 // success
292             }
293         }
294     }
295 
296     @Test
297     public void testAllTechniquesPercentileUsingMedianOf3Pivoting() {
298         kthSelector = new KthSelector(new MedianOf3PivotingStrategy());
299         Assert.assertEquals(MedianOf3PivotingStrategy.class,
300                             getUnivariateStatistic().getPivotingStrategy().getClass());
301         checkAllTechniquesPercentile();
302     }
303 
304     @Test
305     public void testAllTechniquesPercentileUsingCentralPivoting() {
306         kthSelector = new KthSelector(new CentralPivotingStrategy());
307         Assert.assertEquals(CentralPivotingStrategy.class,
308                             getUnivariateStatistic().getPivotingStrategy().getClass());
309         checkAllTechniquesPercentile();
310     }
311 
312     @Test
313     public void testAllTechniquesPercentileUsingRandomPivoting() {
314         kthSelector = new KthSelector(new RandomPivotingStrategy(RandomSource.WELL_1024_A, 0x268a7fb4194240f6L));
315         Assert.assertEquals(RandomPivotingStrategy.class,
316                             getUnivariateStatistic().getPivotingStrategy().getClass());
317         checkAllTechniquesPercentile();
318     }
319 
320     @Test
321     public void testAllTechniquesNISTExample() {
322         final double[] d =
323                 new double[] { 95.1772, 95.1567, 95.1937, 95.1959, 95.1442,
324                         95.0610, 95.1591, 95.1195, 95.1772, 95.0925, 95.1990,
325                         95.1682 };
326 
327         testAssertMappedValues(d, new Object[][] { { Percentile.EstimationType.LEGACY, 95.1981 },
328                 { Percentile.EstimationType.R_1, 95.19590 }, { Percentile.EstimationType.R_2, 95.19590 }, { Percentile.EstimationType.R_3, 95.19590 },
329                 { Percentile.EstimationType.R_4, 95.19546 }, { Percentile.EstimationType.R_5, 95.19683 }, { Percentile.EstimationType.R_6, 95.19807 },
330                 { Percentile.EstimationType.R_7, 95.19568 }, { Percentile.EstimationType.R_8, 95.19724 }, { Percentile.EstimationType.R_9, 95.19714 } }, 90d,
331                 1.0e-04);
332 
333         for (final Percentile.EstimationType e : Percentile.EstimationType.values()) {
334             reset(100.0, e);
335             Assert.assertEquals(95.1990, getUnivariateStatistic().evaluate(d), 1.0e-4);
336         }
337     }
338 
339     @Test
340     public void testAllTechniques5() {
341         reset(5, Percentile.EstimationType.LEGACY);
342         final UnivariateStatistic percentile = getUnivariateStatistic();
343         Assert.assertEquals(this.percentile5, percentile.evaluate(testArray),
344                 getTolerance());
345         testAssertMappedValues(testArray,
346                 new Object[][] { { Percentile.EstimationType.LEGACY, percentile5 }, { Percentile.EstimationType.R_1, 8.8000 },
347                         { Percentile.EstimationType.R_2, 8.8000 }, { Percentile.EstimationType.R_3, 8.2000 }, { Percentile.EstimationType.R_4, 8.2600 },
348                         { Percentile.EstimationType.R_5, 8.5600 }, { Percentile.EstimationType.R_6, 8.2900 },
349                         { Percentile.EstimationType.R_7, 8.8100 }, { Percentile.EstimationType.R_8, 8.4700 },
350                         { Percentile.EstimationType.R_9, 8.4925 }}, 5d, getTolerance());
351     }
352 
353     @Test
354     public void testAllTechniquesNullEmpty() {
355 
356         final double[] nullArray = null;
357         final double[] emptyArray = new double[] {};
358         for (final Percentile.EstimationType e : Percentile.EstimationType.values()) {
359             reset (50, e);
360             final UnivariateStatistic percentile = getUnivariateStatistic();
361             try {
362                 percentile.evaluate(nullArray);
363                 Assert.fail("Expecting NullArgumentException "
364                         + "for null array");
365             } catch (final NullArgumentException ex) {
366                 // expected
367             }
368             Assert.assertTrue(Double.isNaN(percentile.evaluate(emptyArray)));
369         }
370     }
371 
372     @Test
373     public void testAllTechniquesSingleton() {
374         final double[] singletonArray = new double[] { 1d };
375         for (final Percentile.EstimationType e : Percentile.EstimationType.values()) {
376             reset (50, e);
377             final UnivariateStatistic percentile = getUnivariateStatistic();
378             Assert.assertEquals(1d, percentile.evaluate(singletonArray), 0);
379             Assert.assertEquals(1d, percentile.evaluate(singletonArray, 0, 1),
380                     0);
381             Assert.assertEquals(1d,
382                     new Percentile().evaluate(singletonArray, 0, 1, 5), 0);
383             Assert.assertEquals(1d,
384                     new Percentile().evaluate(singletonArray, 0, 1, 100), 0);
385             Assert.assertTrue(Double.isNaN(percentile.evaluate(singletonArray,
386                     0, 0)));
387         }
388     }
389 
390     @Test
391     public void testAllTechniquesEmpty() {
392         final double[] emptyArray = new double[] {};
393         for (final Percentile.EstimationType e : Percentile.EstimationType.values()) {
394             reset(50, e);
395             final UnivariateStatistic percentile = getUnivariateStatistic();
396             Assert.assertEquals(Double.NaN, percentile.evaluate(emptyArray),
397                     0);
398             Assert.assertEquals(Double.NaN, percentile.evaluate(emptyArray,
399                     0, 0),
400                     0);
401             Assert.assertEquals(Double.NaN,
402                     new Percentile().evaluate(emptyArray, 0, 0, 5), 0);
403             Assert.assertEquals(Double.NaN,
404                     new Percentile().evaluate(emptyArray, 0, 0, 100), 0);
405             Assert.assertTrue(Double.isNaN(percentile.evaluate(emptyArray,
406                     0, 0)));
407         }
408     }
409 
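        /** Checks NaN handling when NaN values occur inside the evaluated range: default settings, R_1, R_2, and the MAXIMAL strategy. */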
410     @Test
411     public void testReplaceNanInRange() {
412         final double[] specialValues =
413                 new double[] { 0d, 1d, 2d, 3d, 4d, Double.NaN, Double.NaN, 5d,
414                 7d, Double.NaN, 8d};
415         Assert.assertEquals(/*Double.NaN*/3.5,new Percentile(50d).evaluate(specialValues),0d);
416         reset (50, Percentile.EstimationType.R_1);
417         Assert.assertEquals(3d, getUnivariateStatistic().evaluate(specialValues),0d);
418         reset (50, Percentile.EstimationType.R_2);
419         Assert.assertEquals(3.5d, getUnivariateStatistic().evaluate(specialValues),0d);
420         Assert.assertEquals(Double.POSITIVE_INFINITY,new Percentile(70)
421                                         .withNaNStrategy(NaNStrategy.MAXIMAL)
422                                         .evaluate(specialValues),0d);
423     }
424 
425     @Test
426     public void testRemoveNan() {
427         final double[] specialValues =
428                 new double[] { 0d, 1d, 2d, 3d, 4d, Double.NaN };
429         final double[] expectedValues =
430                 new double[] { 0d, 1d, 2d, 3d, 4d };
431         reset (50, Percentile.EstimationType.R_1);
432         Assert.assertEquals(2.0, getUnivariateStatistic().evaluate(specialValues), 0d);
433         Assert.assertEquals(2.0, getUnivariateStatistic().evaluate(expectedValues),0d);
434         Assert.assertTrue(Double.isNaN(getUnivariateStatistic().evaluate(specialValues,5,1)));
435         Assert.assertEquals(4d, getUnivariateStatistic().evaluate(specialValues, 4, 2), 0d);
436         Assert.assertEquals(3d, getUnivariateStatistic().evaluate(specialValues,3,3),0d);
437         reset(50, Percentile.EstimationType.R_2);
438         Assert.assertEquals(3.5d, getUnivariateStatistic().evaluate(specialValues,3,3),0d);
439     }
440 
441     @Test
442     public void testPercentileCopy() {
443         reset(50d, Percentile.EstimationType.LEGACY);
444         final Percentile original = getUnivariateStatistic();
445         final Percentile copy = new Percentile(original);
446         Assert.assertEquals(original.getNaNStrategy(), copy.getNaNStrategy());
447         Assert.assertEquals(original.getQuantile(), copy.getQuantile(), 0d);
448         Assert.assertEquals(original.getEstimationType(), copy.getEstimationType());
449         Assert.assertEquals(NaNStrategy.FIXED, original.getNaNStrategy());
450     }
451 
452     @Test
453     public void testAllTechniquesSpecialValues() {
454         reset(50d, Percentile.EstimationType.LEGACY);
455         final UnivariateStatistic percentile = getUnivariateStatistic();
456         double[] specialValues =
457                 new double[] { 0d, 1d, 2d, 3d, 4d, Double.NaN };
458         Assert.assertEquals(2.5d, percentile.evaluate(specialValues), 0);
459 
460         testAssertMappedValues(specialValues, new Object[][] {
461                 { Percentile.EstimationType.LEGACY, 2.5d }, { Percentile.EstimationType.R_1, 2.0 }, { Percentile.EstimationType.R_2, 2.0 }, { Percentile.EstimationType.R_3, 1.0 },
462                 { Percentile.EstimationType.R_4, 1.5 }, { Percentile.EstimationType.R_5, 2.0 }, { Percentile.EstimationType.R_6, 2.0 },
463                 { Percentile.EstimationType.R_7, 2.0 }, { Percentile.EstimationType.R_8, 2.0 }, { Percentile.EstimationType.R_9, 2.0 }}, 50d, 0d);
464 
465         specialValues =
466                 new double[] { Double.NEGATIVE_INFINITY, 1d, 2d, 3d,
467                         Double.NaN, Double.POSITIVE_INFINITY };
468         Assert.assertEquals(2.5d, percentile.evaluate(specialValues), 0);
469 
470         testAssertMappedValues(specialValues, new Object[][] {
471                 { Percentile.EstimationType.LEGACY, 2.5d }, { Percentile.EstimationType.R_1, 2.0 }, { Percentile.EstimationType.R_2, 2.0 }, { Percentile.EstimationType.R_3, 1.0 },
472                 { Percentile.EstimationType.R_4, 1.5 }, { Percentile.EstimationType.R_5, 2.0 }, { Percentile.EstimationType.R_6, 2.0 }, { Percentile.EstimationType.R_7, 2.0 },
473                 { Percentile.EstimationType.R_8, 2.0 }, { Percentile.EstimationType.R_9, 2.0 } }, 50d, 0d);
474 
475         specialValues =
476                 new double[] { 1d, 1d, Double.POSITIVE_INFINITY,
477                         Double.POSITIVE_INFINITY };
478         Assert.assertTrue(Double.isInfinite(percentile.evaluate(specialValues)));
479 
480         testAssertMappedValues(specialValues, new Object[][] {
481                 // This is one case that does not match the R results.
482                 { Percentile.EstimationType.LEGACY, Double.POSITIVE_INFINITY },
483                 { Percentile.EstimationType.R_1,/* 1.0 */Double.NaN },
484                 { Percentile.EstimationType.R_2, /* Double.POSITIVE_INFINITY */Double.NaN },
485                 { Percentile.EstimationType.R_3, /* 1.0 */Double.NaN }, { Percentile.EstimationType.R_4, /* 1.0 */Double.NaN },
486                 { Percentile.EstimationType.R_5, Double.POSITIVE_INFINITY },
487                 { Percentile.EstimationType.R_6, Double.POSITIVE_INFINITY },
488                 { Percentile.EstimationType.R_7, Double.POSITIVE_INFINITY },
489                 { Percentile.EstimationType.R_8, Double.POSITIVE_INFINITY },
490                 { Percentile.EstimationType.R_9, Double.POSITIVE_INFINITY }, }, 50d, 0d);
491 
492         specialValues = new double[] { 1d, 1d, Double.NaN, Double.NaN };
493         Assert.assertTrue(Double.isNaN(percentile.evaluate(specialValues)));
494         testAssertMappedValues(specialValues, new Object[][] {
495                 { Percentile.EstimationType.LEGACY, Double.NaN }, { Percentile.EstimationType.R_1, 1.0 }, { Percentile.EstimationType.R_2, 1.0 }, { Percentile.EstimationType.R_3, 1.0 },
496                 { Percentile.EstimationType.R_4, 1.0 }, { Percentile.EstimationType.R_5, 1.0 },{ Percentile.EstimationType.R_6, 1.0 },{ Percentile.EstimationType.R_7, 1.0 },
497                 { Percentile.EstimationType.R_8, 1.0 }, { Percentile.EstimationType.R_9, 1.0 },}, 50d, 0d);
498 
499         specialValues =
500                 new double[] { 1d, 1d, Double.NEGATIVE_INFINITY,
501                         Double.NEGATIVE_INFINITY };
502 
503         testAssertMappedValues(specialValues, new Object[][] {
504                 { Percentile.EstimationType.LEGACY, Double.NaN }, { Percentile.EstimationType.R_1, Double.NaN },
505                 { Percentile.EstimationType.R_2, Double.NaN }, { Percentile.EstimationType.R_3, Double.NaN }, { Percentile.EstimationType.R_4, Double.NaN },
506                 { Percentile.EstimationType.R_5, Double.NaN }, { Percentile.EstimationType.R_6, Double.NaN },
507                 { Percentile.EstimationType.R_7, Double.NaN }, { Percentile.EstimationType.R_8, Double.NaN }, { Percentile.EstimationType.R_9, Double.NaN }
508                 }, 50d, 0d);
509     }
510 
511     @Test
512     public void testAllTechniquesSetQuantile() {
513         for (final Percentile.EstimationType e : Percentile.EstimationType.values()) {
514             reset(10, e);
515             final Percentile percentile = getUnivariateStatistic();
516             percentile.setQuantile(100); // OK
517             Assert.assertEquals(100, percentile.getQuantile(), 0);
518             try {
519                 percentile.setQuantile(0);
520                 Assert.fail("Expecting MathIllegalArgumentException");
521             } catch (final MathIllegalArgumentException ex) {
522                 // expected
523             }
524             try {
525                 new Percentile(0);
526                 Assert.fail("Expecting MathIllegalArgumentException");
527             } catch (final MathIllegalArgumentException ex) {
528                 // expected
529             }
530         }
531     }
532 
533     @Test
534     public void testAllTechniquesEvaluateArraySegmentWeighted() {
535         for (final Percentile.EstimationType e : Percentile.EstimationType.values()) {
536             reset(quantile, e);
537             testEvaluateArraySegmentWeighted();
538         }
539     }
540 
541     @Test
542     public void testAllTechniquesEvaluateArraySegment() {
543         for (final Percentile.EstimationType e : Percentile.EstimationType.values()) {
544             reset(quantile, e);
545             testEvaluateArraySegment();
546         }
547     }
548 
549     @Test
550     public void testAllTechniquesWeightedConsistency() {
551         for (final Percentile.EstimationType e : Percentile.EstimationType.values()) {
552             reset(quantile, e);
553             testWeightedConsistency();
554         }
555     }
556 
557     @Test
558     public void testAllTechniquesEvaluation() {
559 
560         testAssertMappedValues(testArray, new Object[][] { { Percentile.EstimationType.LEGACY, 20.820 },
561                 { Percentile.EstimationType.R_1, 19.800 }, { Percentile.EstimationType.R_2, 19.800 }, { Percentile.EstimationType.R_3, 19.800 },
562                 { Percentile.EstimationType.R_4, 19.310 }, { Percentile.EstimationType.R_5, 20.280 }, { Percentile.EstimationType.R_6, 20.820 },
563                 { Percentile.EstimationType.R_7, 19.555 }, { Percentile.EstimationType.R_8, 20.460 },{ Percentile.EstimationType.R_9, 20.415} },
564                 DEFAULT_PERCENTILE, tolerance);
565     }
566 
567     @Test
568     public void testPercentileWithTechnique() {
569         reset (50, Percentile.EstimationType.LEGACY);
570         final Percentile p = getUnivariateStatistic();
571         Assert.assertEquals(Percentile.EstimationType.LEGACY, p.getEstimationType());
572         Assert.assertNotEquals(Percentile.EstimationType.R_1, p.getEstimationType());
573     }
574 
575     static final int TINY = 10;
576     static final int SMALL = 50;
577     static final int NOMINAL = 100;
578     static final int MEDIUM = 500;
579     static final int STANDARD = 1000;
580     static final int BIG = 10000;
581     static final int VERY_BIG = 50000;
582     static final int LARGE = 1000000;
583     static final int VERY_LARGE = 10000000;
584     static final int[] sampleSizes = {TINY , SMALL , NOMINAL , MEDIUM ,
585             STANDARD, BIG };
586 
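        /** Verifies that evaluating data stored via {@code setData} gives the same result as evaluating the array directly, across sample sizes, quantiles and estimation types. */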
587     @Test
588     public void testStoredVsDirect() {
589         final ContinuousDistribution.Sampler sampler =
590             NormalDistribution.of(4000, 50).createSampler(RandomSource.JDK.create(Long.MAX_VALUE));
591 
592         for (final int sampleSize : sampleSizes) {
593             final double[] data = AbstractRealDistribution.sample(sampleSize, sampler);
594             for (final double p : new double[] { 50d, 95d }) {
595                 for (final Percentile.EstimationType e : Percentile.EstimationType.values()) {
596                     reset(p, e);
597                     final Percentile pStoredData = getUnivariateStatistic();
598                     pStoredData.setData(data);
599                     final double storedDataResult = pStoredData.evaluate();
600                     pStoredData.setData(null);
601                     final Percentile pDirect = getUnivariateStatistic();
602                     Assert.assertEquals("Sample=" + sampleSize + ", P=" + p + " e=" + e,
603                                         storedDataResult,
604                                         pDirect.evaluate(data), 0d);
605                 }
606             }
607         }
608     }
609 
610     @Test
611     public void testPercentileWithDataRef() {
612         reset(50.0, Percentile.EstimationType.R_7);
613         final Percentile p = getUnivariateStatistic();
614         p.setData(testArray);
615         Assert.assertEquals(Percentile.EstimationType.R_7, p.getEstimationType());
616         Assert.assertNotEquals(Percentile.EstimationType.R_1, p.getEstimationType());
617         Assert.assertEquals(12d, p.evaluate(), 0d);
618         Assert.assertEquals(12.16d, p.evaluate(60d), 0d);
619     }
620 
621     @Test(expected=NullArgumentException.class)
622     public void testNullEstimation() {
623         type = null;
624         getUnivariateStatistic();
625     }
626 
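        /** Checks each estimation type's index function at its lower and upper probability limits, and that the corresponding estimates equal the minimum and maximum of the data. */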
627     @Test
628     public void testAllEstimationTechniquesOnlyLimits() {
629         final int N=testArray.length;
630 
631         final double[] input = Arrays.copyOf(testArray, testArray.length);
632         Arrays.sort(input);
633         final double min = input[0];
634         final double max=input[input.length-1];
635         // Limits may be lowered slightly (e.g. by 0.01) to induce the condition p < pMin.
636         final Object[][] map =
637                 new Object[][] { { Percentile.EstimationType.LEGACY, 0d, 1d }, { Percentile.EstimationType.R_1, 0d, 1d },
638                         { Percentile.EstimationType.R_2, 0d,1d }, { Percentile.EstimationType.R_3, 0.5/N,1d },
639                         { Percentile.EstimationType.R_4, 1d/N-0.001,1d },
640                         { Percentile.EstimationType.R_5, 0.5/N-0.001,(N-0.5)/N}, { Percentile.EstimationType.R_6, 0.99d/(N+1),
641                             1.01d*N/(N+1)},
642                         { Percentile.EstimationType.R_7, 0d,1d}, { Percentile.EstimationType.R_8, 1.99d/3/(N+1d/3),
643                             (N-1d/3)/(N+1d/3)},
644                         { Percentile.EstimationType.R_9, 4.99d/8/(N+0.25), (N-3d/8)/(N+0.25)} };
645 
646         for(final Object[] arr:map) {
647             final Percentile.EstimationType t= (Percentile.EstimationType) arr[0];
648             double pMin=(Double)arr[1];
649             final double pMax=(Double)arr[2];
650             Assert.assertEquals("Type:"+t,0d, t.index(pMin, N),0d);
651             Assert.assertEquals("Type:"+t,N, t.index(pMax, N),0.5d);
652             pMin=pMin==0d?pMin+0.01:pMin;
653             testAssertMappedValues(testArray, new Object[][] { { t, min }}, pMin, 0.01);
654             testAssertMappedValues(testArray, new Object[][] { { t, max }}, pMax * 100, tolerance);
655         }
656     }
657 
658     @Test
659     public void testAllEstimationTechniquesOnly() {
660         Assert.assertEquals("Legacy Apache Commons Math",Percentile.EstimationType.LEGACY.getName());
661         final Object[][] map =
662                 new Object[][] { { Percentile.EstimationType.LEGACY, 20.82 }, { Percentile.EstimationType.R_1, 19.8 },
663                         { Percentile.EstimationType.R_2, 19.8 }, { Percentile.EstimationType.R_3, 19.8 }, { Percentile.EstimationType.R_4, 19.310 },
664                         { Percentile.EstimationType.R_5, 20.280}, { Percentile.EstimationType.R_6, 20.820},
665                         { Percentile.EstimationType.R_7, 19.555 }, { Percentile.EstimationType.R_8, 20.460 },{Percentile.EstimationType.R_9,20.415} };
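            // Exercise LEGACY evaluation directly with out-of-range (-1, 101) and in-range (50) quantiles.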
666         try {
667             Percentile.EstimationType.LEGACY.evaluate(testArray, -1d, new KthSelector(new MedianOf3PivotingStrategy()));
668         } catch (final OutOfRangeException oore) {
669         }
670         try {
671             Percentile.EstimationType.LEGACY.evaluate(testArray, 101d, new KthSelector());
672         } catch (final OutOfRangeException oore) {
673         }
674         try {
675             Percentile.EstimationType.LEGACY.evaluate(testArray, 50d, new KthSelector());
676         } catch(final OutOfRangeException oore) {
677         }
678         for (final Object[] o : map) {
679             final Percentile.EstimationType e = (Percentile.EstimationType) o[0];
680             final double expected = (Double) o[1];
681             final double result = e.evaluate(testArray, DEFAULT_PERCENTILE, new KthSelector());
682             Assert.assertEquals("expected[" + e + "] = " + expected +
683                     " but was = " + result, expected, result, tolerance);
684         }
685     }
686 
687     @Test
688     public void testAllEstimationTechniquesOnlyForAllPivotingStrategies() {
689 
690         Assert.assertEquals("Legacy Apache Commons Math",Percentile.EstimationType.LEGACY.getName());
691 
692         for (final PivotingStrategy strategy : new PivotingStrategy[] {
693             new MedianOf3PivotingStrategy(),
694             new CentralPivotingStrategy(),
695             new RandomPivotingStrategy(RandomSource.WELL_1024_A, 0xf097c734e4740053L)
696         }) {
697             kthSelector = new KthSelector(strategy);
698             testAllEstimationTechniquesOnly();
699         }
700     }
701 
702     @Test
703     public void testAllEstimationTechniquesOnlyForExtremeIndexes() {
704         final double MAX=100;
705         final Object[][] map =
706                 new Object[][] { { Percentile.EstimationType.LEGACY, 0d, MAX}, { Percentile.EstimationType.R_1, 0d,MAX+0.5 },
707                 { Percentile.EstimationType.R_2, 0d,MAX}, { Percentile.EstimationType.R_3, 0d,MAX }, { Percentile.EstimationType.R_4, 0d,MAX },
708                 { Percentile.EstimationType.R_5, 0d,MAX }, { Percentile.EstimationType.R_6, 0d,MAX },
709                 { Percentile.EstimationType.R_7, 0d,MAX }, { Percentile.EstimationType.R_8, 0d,MAX }, { Percentile.EstimationType.R_9, 0d,MAX }  };
710         for (final Object[] o : map) {
711             final Percentile.EstimationType e = (Percentile.EstimationType) o[0];
712             Assert.assertEquals(((Double) o[1]).doubleValue(),
713                     e.index(0d, (int) MAX), 0d);
714             Assert.assertEquals("Enum:" + e, ((Double) o[2]).doubleValue(),
715                     e.index(1.0, (int) MAX), 0d);
716         }
717     }
718 
719     @Test
720     public void testAllEstimationTechniquesOnlyForNullsAndOOR() {
721 
722         final Object[][] map =
723                 new Object[][] { { Percentile.EstimationType.LEGACY, 20.82 }, { Percentile.EstimationType.R_1, 19.8 },
724                         { Percentile.EstimationType.R_2, 19.8 }, { Percentile.EstimationType.R_3, 19.8 }, { Percentile.EstimationType.R_4, 19.310 },
725                         { Percentile.EstimationType.R_5, 20.280}, { Percentile.EstimationType.R_6, 20.820},
726                         { Percentile.EstimationType.R_7, 19.555 }, { Percentile.EstimationType.R_8, 20.460 },{ Percentile.EstimationType.R_9, 20.415 } };
727         for (final Object[] o : map) {
728             final Percentile.EstimationType e = (Percentile.EstimationType) o[0];
729             try {
730                 e.evaluate(null, DEFAULT_PERCENTILE, new KthSelector());
731                 Assert.fail("Expecting NullArgumentException");
732             } catch (final NullArgumentException nae) {
733                 // expected
734             }
735             try {
736                 e.evaluate(testArray, 120, new KthSelector());
737                 Assert.fail("Expecting OutOfRangeException");
738             } catch (final OutOfRangeException oore) {
739                 // expected
740             }
741         }
742     }
743 
744     /**
745      * Simple assertion utility that uses the default {@link NaNStrategy NaN handling strategy}
746      * of each
747      * {@link org.apache.commons.math4.legacy.stat.descriptive.rank.Percentile.EstimationType EstimationType}.
748      *
749      * @param data input data
750      * @param map expected results keyed by {@link org.apache.commons.math4.legacy.stat.descriptive.rank.Percentile.EstimationType EstimationType}
751      * @param p the quantile to compute
752      * @param tolerance the allowed difference
753      */
754     protected void testAssertMappedValues(final double[] data, final Object[][] map,
755             final Double p, final Double tolerance) {
756         for (final Object[] o : map) {
757             final Percentile.EstimationType e = (Percentile.EstimationType) o[0];
758             final double expected = (Double) o[1];
759             try {
760                 reset(p, e);
761                 final double result = getUnivariateStatistic().evaluate(data);
762                 Assert.assertEquals("expected[" + e + "] = " + expected +
763                     " but was = " + result, expected, result, tolerance);
764             } catch(final Exception ex) {
765                 Assert.fail("Exception occured for estimation type "+e+":"+
766                         ex.getLocalizedMessage());
767             }
768         }
769     }
770 
771     // Some NaNStrategy specific testing
772     @Test
773     public void testNanStrategySpecific() {
774         double[] specialValues = new double[] { 0d, 1d, 2d, 3d, 4d, Double.NaN };
775         Assert.assertTrue(Double.isNaN(new Percentile(50d).withEstimationType(Percentile.EstimationType.LEGACY).withNaNStrategy(NaNStrategy.MAXIMAL).evaluate(specialValues, 3, 3)));
776         Assert.assertEquals(2d,new Percentile(50d).withEstimationType(Percentile.EstimationType.R_1).withNaNStrategy(NaNStrategy.REMOVED).evaluate(specialValues),0d);
777         Assert.assertEquals(Double.NaN,new Percentile(50d).withEstimationType(Percentile.EstimationType.R_5).withNaNStrategy(NaNStrategy.REMOVED).evaluate(new double[] {Double.NaN,Double.NaN,Double.NaN}),0d);
778         Assert.assertEquals(50d,new Percentile(50d).withEstimationType(Percentile.EstimationType.R_7).withNaNStrategy(NaNStrategy.MINIMAL).evaluate(new double[] {50d,50d,50d},1,2),0d);
779 
780         specialValues = new double[] { 0d, 1d, 2d, 3d, 4d, Double.NaN, Double.NaN };
781         Assert.assertEquals(3.5,new Percentile().evaluate(specialValues, 3, 4),0d);
782         Assert.assertEquals(4d,new Percentile().evaluate(specialValues, 4, 3),0d);
783         Assert.assertTrue(Double.isNaN(new Percentile().evaluate(specialValues, 5, 2)));
784 
785         specialValues = new double[] { 0d, 1d, 2d, 3d, 4d, Double.NaN, Double.NaN, 5d, 6d };
786         Assert.assertEquals(4.5,new Percentile().evaluate(specialValues, 3, 6),0d);
787         Assert.assertEquals(5d,new Percentile().evaluate(specialValues, 4, 5),0d);
788         Assert.assertTrue(Double.isNaN(new Percentile().evaluate(specialValues, 5, 2)));
789         Assert.assertTrue(Double.isNaN(new Percentile().evaluate(specialValues, 5, 1)));
790         Assert.assertEquals(5.5,new Percentile().evaluate(specialValues, 5, 4),0d);
791     }
792 
793     // Some NaNStrategy specific testing
794     @Test(expected=NotANumberException.class)
795     public void testNanStrategyFailed() {
796         double[] specialValues =
797                 new double[] { 0d, 1d, 2d, 3d, 4d, Double.NaN };
798         new Percentile(50d).
799                 withEstimationType(Percentile.EstimationType.R_9).
800                 withNaNStrategy(NaNStrategy.FAILED).
801                 evaluate(specialValues);
802     }
803 
804     @Test
805     public void testAllTechniquesSpecialValuesWithNaNStrategy() {
806         double[] specialValues =
807                 new double[] { 0d, 1d, 2d, 3d, 4d, Double.NaN };
808         try {
809             new Percentile(50d).withEstimationType(Percentile.EstimationType.LEGACY).withNaNStrategy(null);
810             Assert.fail("Expecting NullArgumentArgumentException "
811                     + "for null Nan Strategy");
812         } catch (NullArgumentException ex) {
813             // expected
814         }
815         // These values follow each type's default NaNStrategy.
816         testAssertMappedValues(specialValues, new Object[][] {
817                 { Percentile.EstimationType.LEGACY, 2.5d }, { Percentile.EstimationType.R_1, 2.0 }, { Percentile.EstimationType.R_2, 2.0 }, { Percentile.EstimationType.R_3, 1.0 },
818                 { Percentile.EstimationType.R_4, 1.5 }, { Percentile.EstimationType.R_5, 2.0 }, { Percentile.EstimationType.R_6, 2.0 },
819                 { Percentile.EstimationType.R_7, 2.0 }, { Percentile.EstimationType.R_8, 2.0 }, { Percentile.EstimationType.R_9, 2.0 }}, 50d, 0d);
820 
821         // With MAXIMAL, NaN is treated as the largest value, so the results tend to shift upward by about 0.5.
822         testAssertMappedValues(specialValues, new Object[][] {
823                 { Percentile.EstimationType.LEGACY, 2.5d }, { Percentile.EstimationType.R_1, 2.0 }, { Percentile.EstimationType.R_2, 2.5 }, { Percentile.EstimationType.R_3, 2.0 },
824                 { Percentile.EstimationType.R_4, 2.0 }, { Percentile.EstimationType.R_5, 2.5 }, { Percentile.EstimationType.R_6, 2.5 },
825                 { Percentile.EstimationType.R_7, 2.5 }, { Percentile.EstimationType.R_8, 2.5 }, { Percentile.EstimationType.R_9, 2.5 }}, 50d, 0d,
826                 NaNStrategy.MAXIMAL);
827 
828         // With MINIMAL, NaN is treated as the smallest value, so the results tend to shift downward by about 0.5.
829         testAssertMappedValues(specialValues, new Object[][] {
830                 { Percentile.EstimationType.LEGACY, 1.5d }, { Percentile.EstimationType.R_1, 1.0 }, { Percentile.EstimationType.R_2, 1.5 }, { Percentile.EstimationType.R_3, 1.0 },
831                 { Percentile.EstimationType.R_4, 1.0 }, { Percentile.EstimationType.R_5, 1.5 }, { Percentile.EstimationType.R_6, 1.5 },
832                 { Percentile.EstimationType.R_7, 1.5 }, { Percentile.EstimationType.R_8, 1.5 }, { Percentile.EstimationType.R_9, 1.5 }}, 50d, 0d,
833                 NaNStrategy.MINIMAL);
834 
835         // With REMOVED, only the LEGACY type changes its value from the default row above,
836         // since all other estimation types already default to REMOVED.
837         testAssertMappedValues(specialValues, new Object[][] {
838                 { Percentile.EstimationType.LEGACY, 2.0 }, { Percentile.EstimationType.R_1, 2.0 }, { Percentile.EstimationType.R_2, 2.0 }, { Percentile.EstimationType.R_3, 1.0 },
839                 { Percentile.EstimationType.R_4, 1.5 }, { Percentile.EstimationType.R_5, 2.0 }, { Percentile.EstimationType.R_6, 2.0 },
840                 { Percentile.EstimationType.R_7, 2.0 }, { Percentile.EstimationType.R_8, 2.0 }, { Percentile.EstimationType.R_9, 2.0 }}, 50d, 0d,
841                 NaNStrategy.REMOVED);
842     }
843 
844     /**
845      * Simple test assertion utility method
846      *
847      * @param data input data
848      * @param map expected results keyed by
849      * {@link org.apache.commons.math4.legacy.stat.descriptive.rank.Percentile.EstimationType EstimationType}
850      * @param p the quantile to compute
851      * @param tolerance the allowed difference
852      * @param nanStrategy the NaN handling strategy to apply
853      */
854     protected void testAssertMappedValues(double[] data, Object[][] map,
855                                           Double p, Double tolerance, NaNStrategy nanStrategy) {
856         for (Object[] o : map) {
857             Percentile.EstimationType e = (Percentile.EstimationType) o[0];
858             double expected = (Double) o[1];
859             try {
860                 double result = new Percentile(p).withEstimationType(e).withNaNStrategy(nanStrategy).evaluate(data);
861                 Assert.assertEquals("expected[" + e + "] = " + expected + " but was = " + result,
862                                     expected, result, tolerance);
863             } catch(Exception ex) {
864                 Assert.fail("Exception occured for estimation type " + e + ":" + ex.getLocalizedMessage());
865             }
866         }
867     }
868 
869     // Verify that the weighted percentile gives the same result as the non-weighted one
870     // when all weights are equal.
871     @Test
872     public void testResultWithNonWeightedPercentile() {
873         double[] dataset =
874                 new double[] { Double.NaN, Double.NaN, Double.NaN };
875         double[] weights =
876                 new double[] { 1, 1, 1 };
877         Percentile p = new Percentile().
878                            withEstimationType(Percentile.EstimationType.R_7).
879                            withNaNStrategy(NaNStrategy.MAXIMAL);
880         Assert.assertEquals(p.evaluate(dataset, weights, 25d), p.evaluate(dataset, 25d), 0d);
881         Assert.assertEquals(p.evaluate(dataset, weights, 50d), p.evaluate(dataset, 50d), 0d);
882         Assert.assertEquals(p.evaluate(dataset, weights, 75d), p.evaluate(dataset, 75d), 0d);
883         p = new Percentile().
884                 withEstimationType(Percentile.EstimationType.R_7).
885                 withNaNStrategy(NaNStrategy.MINIMAL);
886         Assert.assertEquals(p.evaluate(dataset, weights, 25d), p.evaluate(dataset, 25d), 0d);
887         Assert.assertEquals(p.evaluate(dataset, weights, 50d), p.evaluate(dataset, 50d), 0d);
888         Assert.assertEquals(p.evaluate(dataset, weights, 75d), p.evaluate(dataset, 75d), 0d);
889         p = new Percentile().
890                 withEstimationType(Percentile.EstimationType.R_7);
891         Assert.assertEquals(p.evaluate(dataset, weights, 25d), p.evaluate(dataset, 25d), 0d);
892         Assert.assertEquals(p.evaluate(dataset, weights, 50d), p.evaluate(dataset, 50d), 0d);
893         Assert.assertEquals(p.evaluate(dataset, weights, 75d), p.evaluate(dataset, 75d), 0d);
894     }
895 
896     @Test(expected=MathIllegalArgumentException.class)
897     public void testDataAndWeightsLength() {
898         double[] dataset =
899                 new double[] { 1d, 2d, 3d, 4d, 5d };
900         double[] weights =
901         new double[] { 1, 1, 1, 1 };
902         new Percentile().
903         withEstimationType(Percentile.EstimationType.R_7).
904         evaluate(dataset, weights, 50d);
905     }
906 
907     @Test
908     public void testWeightedPercentileWithSpecialValues() {
909         double[] dataset = new double[] { 3, 4, 2, 9 };
910         double[] weights = new double[] { 2, 6, 4, 3};
911         Percentile p = new Percentile().
912                            withEstimationType(Percentile.EstimationType.R_7);
913         Assert.assertEquals( 3.53125, p.evaluate(dataset, weights, 50d), 0d);
914     }
915 
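        // The tests below verify argument validation for the weighted setData(...) and evaluate(...) overloads.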
916     @Test(expected=MathIllegalArgumentException.class)
917     public void testsetDataInputLength() {
918         double[] dataset = new double[] { 3, 4, 2, 9 };
919         double[] weights = new double[] { 1, 1, 1 };
920         new Percentile().setData(dataset, weights);
921         new Percentile().setData(dataset, weights, 0, dataset.length);
922     }
923 
924     @Test(expected=NotANumberException.class)
925     public void testsetDataNotANumber() {
926         double[] dataset = new double[] { 3, 4, 2, 9 };
927         double[] weights = new double[] { 1, 1, 1, Double.NaN };
928         new Percentile().setData(dataset, weights);
929         new Percentile().setData(dataset, weights, 0, dataset.length);
930     }
931 
932     @Test(expected=NotStrictlyPositiveException.class)
933     public void testsetDataPositiveWeights() {
934         double[] dataset = new double[] { 3, 4, 2, 9 };
935         double[] weights = new double[] { -1, -1, -1, -1 };
936         new Percentile().setData(dataset, weights);
937         new Percentile().setData(dataset, weights, 0, dataset.length);
938     }
939 
940     @Test(expected=NotPositiveException.class)
941     public void testsetDataPositivIndex() {
942         double[] dataset = new double[] { 3, 4, 2, 9 };
943         double[] weights = new double[] { 1, 1, 1, 1 };
944         new Percentile().setData(dataset, weights, -1, dataset.length);
945         new Percentile().setData(dataset, weights, 0, -1);
946     }
947 
948     @Test(expected=NumberIsTooLargeException.class)
949     public void testsetDataIndexOutBound() {
950         double[] dataset = new double[] { 3, 4, 2, 9 };
951         double[] weights = new double[] { 1, 1, 1, 1 };
952         new Percentile().setData(dataset, weights, 0, dataset.length+1);
953     }
954 
955     @Test(expected=NullPointerException.class)
956     public void testsetDataInputNull() {
957         new Percentile().setData(null, null);
958         new Percentile().setData(null, null, 0, 0);
959     }
960 
961     @Test(expected=MathIllegalArgumentException.class)
962     public void testevaluateInputLength() {
963         double[] dataset = new double[] { 3, 4, 2, 9 };
964         double[] weights = new double[] { 1, 1, 1 };
965         Percentile p = new Percentile().withEstimationType(Percentile.EstimationType.R_7);
966         p.setData(dataset, weights);
967         p.evaluate(50);
968         p.evaluate(dataset, weights, 50);
969         p.evaluate(dataset, weights, 0, dataset.length);
970         p.evaluate(dataset, weights, 0, dataset.length, 50);
971     }
972 
973     @Test(expected=NotPositiveException.class)
974     public void testevaluatePositivIndex() {
975         double[] dataset = new double[] { 3, 4, 2, 9 };
976         double[] weights = new double[] { 1, 1, 1 ,1};
977         Percentile p = new Percentile().withEstimationType(Percentile.EstimationType.R_7);
978         p.setData(dataset, weights);
979         p.evaluate(50);
980         p.evaluate(dataset, weights, 50);
981         p.evaluate(dataset, weights, -1, dataset.length);
982         p.evaluate(dataset, weights, 0, -1, 50);
983     }
984 
985     @Test(expected=NotStrictlyPositiveException.class)
986     public void testevaluatePositivWeights() {
987         double[] dataset = new double[] { 3, 4, 2, 9 };
988         double[] weights = new double[] { -1, -1, -1 , -1};
989         Percentile p = new Percentile().withEstimationType(Percentile.EstimationType.R_7);
990         p.setData(dataset, weights);
991         p.evaluate(50);
992         p.evaluate(dataset, weights, 50);
993         p.evaluate(dataset, weights, 0, dataset.length);
994         p.evaluate(dataset, weights, 0, dataset.length, 50);
995     }
996 
997     @Test(expected=NotANumberException.class)
998     public void testevaluateNotANumber() {
999         double[] dataset = new double[] { 3, 4, 2, 9 };
1000         double[] weights = new double[] { 1, 1, 1, Double.NaN};
1001         Percentile p = new Percentile().withEstimationType(Percentile.EstimationType.R_7);
1002         p.setData(dataset, weights);
1003         p.evaluate(50);
1004         p.evaluate(dataset, weights, 50);
1005         p.evaluate(dataset, weights, 0, dataset.length);
1006         p.evaluate(dataset, weights, 0, dataset.length, 50);
1007     }
1008 
1009     @Test(expected=NotStrictlyPositiveException.class)
1010     public void testevaluatePositiveWeights() {
1011         double[] dataset = new double[] { 3, 4, 2, 9 };
1012         double[] weights = new double[] { -1, -1, -1, -1};
1013         Percentile p = new Percentile().withEstimationType(Percentile.EstimationType.R_7);
1014         p.setData(dataset, weights);
1015         p.evaluate(50);
1016         p.evaluate(dataset, weights, 50);
1017         p.evaluate(dataset, weights, 0, dataset.length);
1018         p.evaluate(dataset, weights, 0, dataset.length, 50);
1019     }
1020 
1021     @Test(expected=OutOfRangeException.class)
1022     public void testevaluatep() {
1023         double[] dataset = new double[] { 3, 4, 2, 9 };
1024         double[] weights = new double[] { 1, 1, 1, 1};
1025         Percentile p = new Percentile().withEstimationType(Percentile.EstimationType.R_7);
1026         p.setData(dataset, weights);
1027         p.evaluate(101);
1028         p.evaluate(dataset, weights, 101);
1029         p.evaluate(dataset, weights, 0, dataset.length);
1030         p.evaluate(dataset, weights, 0, dataset.length, 101);
1031     }
1032 
1033     @Test(expected=NumberIsTooLargeException.class)
1034     public void testevaluateIndexBound() {
1035         double[] dataset = new double[] { 3, 4, 2, 9 };
1036         double[] weights = new double[] { 1, 1, 1, 1};
1037         Percentile p = new Percentile().withEstimationType(Percentile.EstimationType.R_7);
1038         p.setData(dataset, weights);
1039         p.evaluate(50);
1040         p.evaluate(dataset, weights, 50);
1041         p.evaluate(dataset, weights, 0, dataset.length + 1);
1042         p.evaluate(dataset, weights, 0, dataset.length + 1, 50);
1043     }
1044 }