import unittest

import wx

from NaCurves import *
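
# The tests below assume that givenFunction(x) in NaCurves implements
# f(x) = 1 / (1 + 100 * x**2), a Runge-type function that is a classic
# stress case for polynomial interpolation.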

class TestGivenFunction(unittest.TestCase):
        def testOne(self):
                expected = 1 / (1 + 100 * ((-1.0)**2))
                actual = givenFunction(-1.0)
                self.assertEqual(actual, expected)

        def testPow(self):
                self.assertEqual(1, (-1.0)**2)

        def testMany(self):
                dataset = [-1.0, -0.8, -0.6, -0.4, -0.2, 0, 0.2, 0.4, 0.6, 0.8, 1.0]
                for x in dataset:
                        expected = 1 / (1 + 100 * (x**2))
                        actual = givenFunction(x)
                        self.assertEqual(actual, expected)
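
# A Lagrange interpolant through control points (x_i, y_i) has the form
#     L(x) = sum_i y_i * l_i(x),  l_i(x) = prod_{j != i} (x - x_j) / (x_i - x_j).
# The tests below pin down the two helper layers separately:
# _subBasedFunction(x, i, j) for a single factor (x - x_j) / (x_i - x_j),
# and _basedFunction(x, i) for the full basis polynomial l_i(x).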

class TestLagrange(unittest.TestCase):
        def setUp(self):
                self.dataset = [-1.0, -0.8, -0.6, -0.4, -0.2, 0, 0.2, 0.4, 0.6, 0.8, 1.0]
                self.l = Lagrange(self.dataset)

        def tearDown(self):
                self.l = None

        def _getX(self, i):
                return self.dataset[i]

        def testFunctionExistence(self):
                self.assertTrue(Lagrange)

        def testInsertPointX(self):
                l = Lagrange(self.dataset)
                self.assertEqual(l.getControlPointListX(), self.dataset)

        def testInsertPointY(self):
                l = Lagrange(self.dataset)
                listY = [givenFunction(x) for x in self.dataset]
                self.assertEqual(l.getControlPointListY(), listY)

        def testSubBasedFunctionOne(self):
                l = Lagrange(self.dataset)
                x = -0.1

                expected = (x - self._getX(1)) / (self._getX(0) - self._getX(1))
                self.assertEqual(l._subBasedFunction(x, 0, 1), expected)
                expected = (x - self._getX(2)) / (self._getX(0) - self._getX(2))
                self.assertEqual(l._subBasedFunction(x, 0, 2), expected)
                expected = (x - self._getX(3)) / (self._getX(0) - self._getX(3))
                self.assertEqual(l._subBasedFunction(x, 0, 3), expected)
                # The j == i factor is excluded from the basis product, so it is 1.
                self.assertEqual(l._subBasedFunction(x, 0, 0), 1)

        def testSubBasedFunctionTwo(self):
                l = Lagrange(self.dataset)
                x = -0.1

                expected = (x - self._getX(0)) / (self._getX(1) - self._getX(0))
                self.assertEqual(l._subBasedFunction(x, 1, 0), expected)
                expected = (x - self._getX(2)) / (self._getX(1) - self._getX(2))
                self.assertEqual(l._subBasedFunction(x, 1, 2), expected)
                expected = (x - self._getX(3)) / (self._getX(1) - self._getX(3))
                self.assertEqual(l._subBasedFunction(x, 1, 3), expected)
                self.assertEqual(l._subBasedFunction(x, 1, 1), 1)

        def testSubBasedFunctionMany(self):
                l = Lagrange(self.dataset)
                x = -0.3
                for i in range(len(self.dataset)):
                        for j in range(len(self.dataset)):
                                actual = l._subBasedFunction(x, i, j)
                                if i == j:
                                        self.assertEqual(actual, 1)
                                else:
                                        expected = (x - self._getX(j)) / (self._getX(i) - self._getX(j))
                                        self.assertEqual(actual, expected)

        def testBasedFunctionOne(self):
                x = -0.1
                expected = 1
                for i in range(1, len(self.dataset)):
                        expected *= (x - self._getX(i)) / (self._getX(0) - self._getX(i))

                self.assertEqual(self.l._basedFunction(x, 0), expected)

        def testBasedFunctionTwo(self):
                x = -0.1
                expected = 1
                for i in range(len(self.dataset)):
                        # Skip the i == 1 factor: it is excluded from the basis product.
                        if i != 1:
                                expected *= (x - self._getX(i)) / (self._getX(1) - self._getX(i))

                self.assertEqual(self.l._basedFunction(x, 1), expected)

        def testLagrangeFunction(self):
                self.assertTrue(self.l.perform)

        def testInterpolation(self):
                # The interpolant must reproduce every control point exactly.
                for x in self.dataset:
                        expected = givenFunction(x)
                        actual = self.l.perform(x)
                        self.assertEqual(actual, expected)

        def testOne(self):
                listX = [-0.8, -0.6, -0.4]
                l = Lagrange(listX)
                x = -0.5
                expected = 1
                actual = l._subBasedFunction(x, 0, 0)
                self.assertEqual(actual, expected)

                expected = (x - listX[1]) / (listX[0] - listX[1])
                actual = l._subBasedFunction(x, 0, 1)
                self.assertEqual(actual, expected)

                expected = (x - listX[2]) / (listX[0] - listX[2])
                actual = l._subBasedFunction(x, 0, 2)
                self.assertEqual(actual, expected)

                expected = (x - listX[0]) / (listX[1] - listX[0])
                actual = l._subBasedFunction(x, 1, 0)
                self.assertEqual(actual, expected)

        def testTwo(self):
                listX = [-0.8, -0.6, -0.4]
                l = Lagrange(listX)
                x = -0.5

                expected = 1
                expected *= (x - listX[1]) / (listX[0] - listX[1])
                expected *= (x - listX[2]) / (listX[0] - listX[2])
                actual = l._basedFunction(x, 0)
                self.assertEqual(actual, expected)

        def testThree(self):
                listX = [-0.8, -0.6, -0.4]
                l = Lagrange(listX)

                x = -0.8
                expected = 1
                expected *= (x - listX[1]) / (listX[0] - listX[1])
                expected *= (x - listX[2]) / (listX[0] - listX[2])
                expected *= givenFunction(x)

                actual = l.perform(x)

                # x is the first control point, so both basis factors equal 1 and
                # the product collapses to givenFunction(x).
                self.assertEqual(expected, givenFunction(x))
                self.assertEqual(actual, expected)
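
# PiecewiseLagrange(dataset, n) is exercised below as if it splits the control
# points into runs of n (adjacent runs sharing an endpoint, with the last run
# clamped to the final n points) and fits an ordinary Lagrange polynomial on
# each run; perform(x) should agree with the sub-interpolant for the piece
# containing x.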

class TestPiecewiseLagrange(unittest.TestCase):
        def setUp(self):
                self.dataset = [-1.0, -0.8, -0.6, -0.4, -0.2, 0.0, 0.2, 0.4, 0.6, 0.8, 1.0]

        def testPiecewiseExistence(self):
                self.assertTrue(PiecewiseLagrange)

        def testInsertPointX(self):
                pl = PiecewiseLagrange(self.dataset, 4)
                self.assertEqual(pl.getControlPointListX(), self.dataset)

        def testInterpolation(self):
                pl = PiecewiseLagrange(self.dataset, 4)
                for x in self.dataset:
                        expected = givenFunction(x)
                        actual = pl.perform(x)
                        self.assertEqual(actual, expected)

        def testPerformOne(self):
                pl = PiecewiseLagrange(self.dataset, 4)
                # x = -0.9 falls inside the first piece.
                subPl = Lagrange([-1.0, -0.8, -0.6, -0.4])
                x = -0.9
                expected = subPl.perform(x)
                actual = pl.perform(x)
                self.assertEqual(actual, expected)

        def testPerformTwo(self):
                pl = PiecewiseLagrange(self.dataset, 4)
                subPl = Lagrange([-0.4, -0.2, 0.0, 0.2])
                x = 0.1
                expected = subPl.perform(x)
                actual = pl.perform(x)
                self.assertEqual(actual, expected)

        def testPerformThree(self):
                pl = PiecewiseLagrange(self.dataset, 4)
                subPl = Lagrange([0.2, 0.4, 0.6, 0.8])
                x = 0.5
                expected = subPl.perform(x)
                actual = pl.perform(x)
                self.assertEqual(actual, expected)

        def testPerformFour(self):
                pl = PiecewiseLagrange(self.dataset, 4)
                subPl = Lagrange([0.4, 0.6, 0.8, 1.0])
                x = 0.95
                expected = subPl.perform(x)
                actual = pl.perform(x)
                self.assertEqual(actual, expected)

        def testControlPointPiece(self):
                pl = PiecewiseLagrange(self.dataset, 4)
                expected = [-1.0, -0.8, -0.6, -0.4]
                actual = pl.getPiece(1)
                self.assertEqual(actual, expected)

                expected = [-0.4, -0.2, 0.0, 0.2]
                actual = pl.getPiece(2)
                self.assertEqual(actual, expected)

                expected = [0.2, 0.4, 0.6, 0.8]
                actual = pl.getPiece(3)
                self.assertEqual(actual, expected)

                # The last piece is simply the final four control points.
                expected = [0.4, 0.6, 0.8, 1.0]
                actual = pl.getPiece(4)
                self.assertEqual(actual, expected)

        def testCountPieces(self):
                pl = PiecewiseLagrange(self.dataset, 4)
                self.assertEqual(pl.getCountPieces(), 4)
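
# The Spline tests below follow the standard natural cubic spline setup:
# solve the tridiagonal system A * y'' = B for the interior second
# derivatives, where
#     B_i = 6 * (dY_i / dX_i - dY_{i-1} / dX_{i-1}),  i = 1 .. n-1,
# with natural boundary conditions y''_0 = y''_n = 0 at the endpoints.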

class TestSpline(unittest.TestCase):
        def setUp(self):
                self.dataset = [-1.0, -0.8, -0.6, -0.4, -0.2, 0, 0.2, 0.4, 0.6, 0.8, 1.0]
                self.s = Spline(self.dataset)

        def tearDown(self):
                self.s = None

        def testInterpolation(self):
                for x in self.dataset:
                        expected = givenFunction(x)
                        actual = self.s.perform(x)
                        self.assertEqual(actual, expected)

        def testInsertPointX(self):
                actual = self.s.getControlPointListX()
                self.assertEqual(actual, self.dataset)

        def testControlPointY(self):
                expected = [givenFunction(x) for x in self.dataset]
                actual = self.s.getControlPointListY()
                self.assertEqual(actual, expected)

        def testDeltaX(self):
                expected = -0.8 - (-1.0)
                actual = self.s.deltaX(0)
                self.assertEqual(actual, expected)

        def testDeltaY(self):
                expected = givenFunction(self.dataset[1]) - givenFunction(self.dataset[0])
                actual = self.s.deltaY(0)
                self.assertEqual(actual, expected)

        def testEmptyMatrixA(self):
                actual = self.s._makeEmptyMatrix()
                # 11 knots leave 9 interior points, hence a 9x9 system.
                expected = [[0.0] * 9 for _ in range(9)]
                self.assertEqual(actual, expected)

        def testMatrixA(self):
                actual = self.s._makeMatrixA()

                # With uniform spacing h = 0.2 the system matrix is tridiagonal:
                # 2 * (h + h) = 0.8 on the diagonal and h = 0.2 on either side.
                for i in range(9):
                        if i - 1 >= 0:
                                self.assertEqual(round(actual[i - 1][i], 1), 0.2)
                                self.assertEqual(round(actual[i][i - 1], 1), 0.2)
                        self.assertEqual(round(actual[i][i], 1), 0.8)

        def testMatrixB(self):
                actual = self.s._makeMatrixB()

                expected = []
                for i in range(1, 10):
                        expected.append([6 * (self.deltaY(i) / self.deltaX(i) - self.deltaY(i - 1) / self.deltaX(i - 1))])

                self.assertEqual(actual, expected)

        def deltaX(self, i):
                # Knot spacing, used to build the expected right-hand side above.
                return self.dataset[i + 1] - self.dataset[i]

        def deltaY(self, i):
                return givenFunction(self.dataset[i + 1]) - givenFunction(self.dataset[i])

        def testDoublePrimeY(self):
                actual = self.s._makeDoublePrimeY()

                # Natural boundary conditions: zero curvature at both endpoints.
                self.assertEqual(actual[0][0], 0.0)
                self.assertEqual(actual[10][0], 0.0)

        # The remaining tests are smoke tests: they only exercise perform()
        # and the per-segment coefficient accessors and print the results.
        def testPerform(self):
                actual = self.s.perform(0)
                print(actual)

        def testAi(self):
                actual = self.s.getAi(0)
                print(actual)

        def testBi(self):
                actual = self.s.getBi(0)
                print(actual)

        def testCi(self):
                actual = self.s.getCi(0)
                print(actual)

        def testDi(self):
                actual = self.s.getDi(0)
                print(actual)

class TestApp(wx.App):
        def OnInit(self):
                return True

if __name__ == "__main__":
        # NaCurves may require a live wx.App instance, so create one before
        # running the suite.
        app = TestApp(0)

        unittest.main(argv=["", "-v"])