Linear system solver results with a possibly singular matrix (Octave vs. Python)

Date: 2019-05-03 00:33:20

Tags: python numpy octave

I am trying to solve a system of linear equations with both Octave and Python. Octave warns that the matrix is singular to machine precision, and the two languages give completely different answers, each of which appears "correct" in some sense. I suspect the cause is a difference in numerical behavior between the two languages. For the linear system arising in the code below, how can I solve it so that both give the same answer? Or, failing that, which answer should I "trust"?

I am running the code on my own MacBook Pro, and I have tried changing the data in Python between float32 and float64; the answer still differs from Octave's.
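
For background, here is a minimal, self-contained sketch (an assumed 2x2 toy system, not my actual data) of what I understand the warning to imply: when a matrix is singular to machine precision, two perfectly reasonable solvers can return very different vectors, both of which reproduce the right-hand side almost exactly.

import numpy as np

# The two rows of A differ only in the 15th decimal digit, so A is
# non-singular in exact arithmetic but nearly singular in floating point.
A = np.array([[1.0, 1.0],
              [1.0, 1.0 + 1e-15]])
b = np.array([1.0, 1.0 + 2e-15])

x_solve = np.linalg.solve(A, b)                 # direct LU-based solve
x_lstsq = np.linalg.lstsq(A, b, rcond=None)[0]  # minimum-norm least squares

print("cond(A) =", np.linalg.cond(A))           # enormous, on the order of 1/eps
print("solve:", x_solve, "residual:", A @ x_solve - b)
print("lstsq:", x_lstsq, "residual:", A @ x_lstsq - b)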

The Python code is as follows:

solver.py

import numpy as np
import pandas as pd


def Laguerre(X, k):
    '''
    ARGS:
        X: the values that laguerre function valued;
        k: the rank of the laguerre function.
    RETURN:
        the kth laguerre function value at X.
    '''
    if k == 0:
        return np.ones_like(X)
    elif k == 1:
        return 1-X
    else:
        return (1/k) * ((2*k-1-X)*Laguerre(X, k-1)
                        - (k-1)*Laguerre(X, k-2))

def Basis_Laguerre(X, J):
    '''
    ARGS:
        X: Same as above;
        J: number of the basis function.
    RETURN:
        the J basis functions valued at X.
    '''
    Y = np.zeros((J, X.shape[0]))

    for j in range(J):
        Y[j, :] = Laguerre(X, j)

    return Y

def Payoff_put(r, T, S, K, M, N):
    '''
    Discounted payoff function of a put option
    ARGS:
        r: the discount rate;
        T: the maturity date;
        S: the possible paths of stock's price;
        K: the strike price;
        M: number of time intervals;
        N: number of MC paths.
    RETURN:
        the payoff put function values at S.
    '''

    dt = T/M
    h  = np.zeros((M, N))
    h[-1, :] = np.exp(-r*T) * np.maximum(K-S[-1, :], 0)

    for i in range(M):
        m = i + 1
        time = m*dt
        h[i, :] = np.exp(-r*time) * np.maximum(K-S[m, :], 0)

    return h


# Backward induction for estimating regression coefficients.
# The code is written for simple Laguerre polynomials, which can
# be replaced with other basis functions of interest.
def Regression_Beta(S, h, M, N, J):
    '''
    ARGS:
        S: Stock price process S(M+1, N);
        h: Payoff function h(M, N);
        M, N, J: as above.
    RETURN:
        Regression coefficients Beta(J, M)
    '''

    Beta = np.zeros((J, M))
    c    = np.zeros((M, N))
    V    = h[-1, :]

    for i in range(M-2, -1, -1):
        m = i + 1  # current time index

        # basis functions, different basis functions can
        # be used here by replacing with corresponding functions
        Phi = Basis_Laguerre(S[m, :], J)

        sum1 = 0
        sum2 = 0

        for j in range(N):
            phi_j = Phi[:, j]
            sum1 += np.outer(phi_j, phi_j)   # rank-one update, as in the Octave code
            sum2 += V[j] * phi_j


        print ("sum1 =")
        print (sum1)
        print ("sum2 =")
        print (sum2)
        Beta[:, i] = np.linalg.solve(sum1, sum2)
        print ("ans =")
        print (Beta[:, i])
        print ("multiply of sum1 and ans =")
        print (np.matmul(sum1, Beta[:, i]))
        c[i, :] = np.matmul(Beta[:, i], Phi)
        V = np.maximum(h[i, :], c[i, :])

    return Beta

def main():

    r   = 0.05
    q   = 0.0
    sig = 0.4
    S0  = 20
    K   = 20
    T   = 1

    N   = 4
    M   = 2
    J   = 5

    S = pd.read_csv("stocks.csv", header=None).values
    h = Payoff_put(r, T, S, K, M, N)
    beta = Regression_Beta(S, h, M, N, J)

if __name__ == '__main__':
    main()
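
With N = 4 paths and J = 5 basis functions, sum1 is a sum of only four rank-one outer products, so I expect it to be rank-deficient. Below is a small diagnostic I have been considering (a sketch only, not part of the program above; diagnose_solve is a hypothetical helper reusing the sum1 and sum2 names from Regression_Beta). It reports the conditioning of sum1 and compares np.linalg.solve with the minimum-norm solution from np.linalg.lstsq.

import numpy as np

def diagnose_solve(sum1, sum2):
    '''Report the conditioning of sum1 and compare solve() with lstsq().'''
    print("rank(sum1) =", np.linalg.matrix_rank(sum1), "of", sum1.shape[0])
    print("cond(sum1) =", np.linalg.cond(sum1))

    # Minimum-norm least-squares solution; well defined even if sum1 is singular.
    beta_lstsq = np.linalg.lstsq(sum1, sum2, rcond=None)[0]
    try:
        beta_solve = np.linalg.solve(sum1, sum2)  # raises if sum1 is exactly singular
    except np.linalg.LinAlgError:
        beta_solve = None

    print("lstsq:", beta_lstsq, "residual:", sum1 @ beta_lstsq - sum2)
    if beta_solve is not None:
        print("solve:", beta_solve, "residual:", sum1 @ beta_solve - sum2)
    return beta_lstsq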

The Octave code is as follows:

Basis_laguerre.m

% J: number of basis functions

function Y = Basis_laguerre(X, J)

Y = zeros(J, length(X));

for j = 1:J
    Y(j, :) = Laguerre(X, j-1);
end

end

function Y = Laguerre(X, k)

if (k == 0)
    Y = 1;
elseif (k == 1)
    Y = 1 - X;
else
    Y = (1/k) * ((2*k-1-X).*Laguerre(X, k-1) - (k-1)*Laguerre(X, k-2));
end

end

Payoff_put.m

% Discounted Payoff function of put
% K: strike
% M: number of time intervals
% N: number of MC paths

function h = Payoff_put(r, T, S, K, M, N)

dt = T/M;
h  = zeros(M, N);
h(M, :) = exp(-r*T) * max(K-S(M+1, :), 0);

for i = 1:M
    m = i+1;     % current time index (exercise dates)
    time = (m-1)*dt;
    h(i, :) = exp(-r*time) * max(K-S(m, :), 0);
end

end

Regression_Beta.m

% Backward induction for estimating regression coefficients
% The code is written for simple Laguerre polynomials, which can
% be replaced with other basis functions of interest.
% S: Stock price process S(M+1, N); h: Payoff function h(M, N)
% Beta: Regression coefficients Beta(J, M)

function Beta = Regression_Beta(S, h, M, N, J)

Beta = zeros(J, M);
c    = zeros(M, N);     % continuation values
V = h(M, :);

for i = (M-1):-1:1

    m = i+1;    % current time index

    % basis functions, different basis functions can
    % be used here by replacing with corresponding functions
    Phi = Basis_laguerre(S(m, :), J);

    sum1 = 0;
    sum2 = 0;

    for j = 1:N
        sum1 = sum1 + Phi(:, j) * Phi(:, j)';
        sum2 = sum2 + Phi(:, j) * V(j);
    end

    sum1
    sum2

    Beta(:, i) = sum1 \ sum2;

    Beta(:, i)

    display('multiply of sum1 and ans =')
    disp(sum1 * Beta(:, i))
    c(i, :) = (Beta(:, i)') * Phi;

    V = max(h(i, :), c(i, :));
end

end

solver.m

% load Stocks data
load stocks.csv


% problem parameters
r=0.05;
q=0.0;
sig=0.4;
S0=20;
K=20;
T=1;

N=4;
M=2;
J=5;

S = stocks;
h = Payoff_put(r, T, S, K, M, N);
Beta = Regression_Beta(S, h, M, N, J);

The Octave result is:

[1.130633  -3.837296  -1.001382  -0.163776  -0.013822]

The Python result is:

[2.13567025e+01  1.81036596e+00  1.66146104e-01  2.30085805e-03  -1.49597784e-03]
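
Both programs print sum1 and sum2 just before the solve, so one way I thought of comparing the two candidate answers is to check their residuals against that same system (a sketch; compare_candidates is a hypothetical helper, and sum1/sum2 would be the values printed by either program):

import numpy as np

def compare_candidates(sum1, sum2, beta_octave, beta_python):
    '''Check how well each candidate coefficient vector reproduces sum2.'''
    for name, beta in (("octave", np.asarray(beta_octave)),
                       ("python", np.asarray(beta_python))):
        residual = sum1 @ beta - sum2
        print(name, "||sum1 @ beta - sum2|| =", np.linalg.norm(residual))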

The stocks.csv file is as follows:

2.000000000000000000e+01,2.000000000000000000e+01,2.000000000000000000e+01,2.000000000000000000e+01
1.845592355910364191e+01,1.573915309084057768e+01,6.180780215533716593e+00,2.952039953142780249e+01
1.122967711904559884e+01,1.107857526050260260e+01,6.046507773926283136e+00,2.096170988901321763e+01
