我最近编写了Beddington DeAngelis模型的python实现,用于对捕食者和被捕食者进行建模。
我的问题是代码非常慢:该程序需要在合理的时间内完成100万次迭代,但目前仅10,000次迭代就要花费230秒。
我知道我可以用C重写,因为这主要是数学运算,但是我真的想学习如何在python中正确向量化程序。
为简化起见,情况是我有两个形状为200x200的数组,我需要迭代每个数组的每个元素,同时使用其他数组中的相同索引元素以及该数组中的一些周围元素。因此,例如,如果我正在研究a [1] [1],则还需要:
b[1][1]
a[0][1]
a[2][1]
a[1][0]
a[1][2]
整个操作应该是完全可矢量化的,因为我要在单个时间步中更改所有200x200x2的元素。
那我怎么调用这个函数来获取这些索引?
任何建议将不胜感激。
完整的上下文代码:(它看起来很吓人,但实际上非常简单)
import numpy as np
import copy
import time
def get_cell_zero_flux(m,i,j,x,y,prey):
    """
    Return m[i][j], substituting a constant boundary value when the
    index (i, j) lies outside the x-by-y grid.
    """
    inside = (0 <= i < x) and (0 <= j < y)
    if inside:
        return m[i][j]
    # Outside the border: fixed value per species (prey vs predator).
    return 0.43058 if prey else 0.718555
def get_laplacian(n,i,j,x,y,neighbors,h,prey):
    """
    Five-point finite-difference Laplacian of n at (i, j), using
    constant out-of-border values supplied by get_cell_zero_flux.
    """
    neighbor_sum = sum(
        get_cell_zero_flux(n, i + di, j + dj, x, y, prey)
        for di, dj in neighbors
    )
    return (neighbor_sum - 4 * n[i][j]) / (h ** 2)
def next_n(n,p,nl,pl,d11,d12,d21,d22,t,r,e,beta,k,ni,w,b):
    """
    One explicit Euler step of the prey equation: logistic growth,
    Beddington-DeAngelis predation loss, and cross-diffusion.
    (d21, d22, e, ni are unused here; kept for a uniform signature.)
    """
    growth = r * (1 - n / k) * n
    predation = beta * n / (b + n + w * p) * p
    diffusion = d11 * nl + d12 * pl
    return n + t * (growth - predation + diffusion)
def next_p(n,p,nl,pl,d11,d12,d21,d22,t,r,e,beta,k,ni,w,b):
    """
    One explicit Euler step of the predator equation: conversion of
    predation, linear mortality, and cross-diffusion.
    (d11, d12, r, k are unused here; kept for a uniform signature.)
    """
    conversion = e * beta * n / (b + n + w * p) * p
    mortality = ni * p
    diffusion = d21 * nl + d22 * pl
    return p + t * (conversion - mortality + diffusion)
def generate_preys(x,y,epsilon,n_start):
    """
    Initial prey field: n_start plus uniform noise rescaled into the
    interval [-epsilon/2, epsilon/2].
    """
    noise = np.random.rand(x, y)
    lo, hi = noise.min(), noise.max()
    noise = np.interp(noise, (lo, hi), (-epsilon / 2, epsilon / 2))
    return noise + n_start
def generate_predators(x,y,p_start):
    """
    Initial predator field: a constant x-by-y array filled with p_start.
    """
    # np.full allocates and fills in one pass; the original built an
    # array of ones and then overwrote every element with fill().
    return np.full((x, y), p_start, dtype=float)
def generate_n(n0,n,p,x,y,neighbors,h,d11,d12,t,r,e,beta,k,ni,w,b):
    """
    Vectorized prey update for the whole grid in one step.

    Fixes the broken attempt: the original referenced an undefined name
    (`laplacian`), called next_n with the wrong number of arguments, and
    used np.where(n==n0) to recover indices, which cannot work element-wise.
    Instead, pad each grid with its constant border value (the same
    constants get_cell_zero_flux returns) and form the five-point
    Laplacian with array slices.  `neighbors`, `x`, `y` are unused but
    kept so existing callers keep working.
    """
    def _lap(a, fill):
        # One ghost cell of constant `fill` emulates get_cell_zero_flux.
        padded = np.pad(a, 1, constant_values=fill)
        return (padded[:-2, 1:-1] + padded[2:, 1:-1] +
                padded[1:-1, :-2] + padded[1:-1, 2:] - 4 * a) / (h ** 2)

    nl = _lap(n, 0.43058)   # prey boundary constant (see get_cell_zero_flux)
    pl = _lap(p, 0.718555)  # predator boundary constant
    # d21/d22 are not used by next_n, so zeros are safe placeholders.
    return next_n(n0, p, nl, pl, d11, d12, 0.0, 0.0,
                  t, r, e, beta, k, ni, w, b)
def generate_p(p0,p,n,x,y,neighbors,h,d21,d22,t,r,e,beta,k,ni,w,b):
    """
    Vectorized predator update for the whole grid in one step.

    Fixes the broken attempt: the original referenced d11/d12, which are
    not in this function's scope, and used np.where(p==p0) to recover
    indices.  Laplacians are built by padding with the constant border
    values that get_cell_zero_flux returns.  `neighbors`, `x`, `y` are
    unused but kept so existing callers keep working.
    """
    def _lap(a, fill):
        # One ghost cell of constant `fill` emulates get_cell_zero_flux.
        padded = np.pad(a, 1, constant_values=fill)
        return (padded[:-2, 1:-1] + padded[2:, 1:-1] +
                padded[1:-1, :-2] + padded[1:-1, 2:] - 4 * a) / (h ** 2)

    nl = _lap(n, 0.43058)   # prey boundary constant (see get_cell_zero_flux)
    pl = _lap(p, 0.718555)  # predator boundary constant
    # d11/d12 are not used by next_p, so zeros are safe placeholders.
    return next_p(n, p0, nl, pl, 0.0, 0.0, d21, d22,
                  t, r, e, beta, k, ni, w, b)
def generate_system(x,y,h,d11,d12,d21,d22,t,r,e,
                    beta,k,ni,w,b,ite,n_start,p_start,epsilon):
    """
    Run the prey/predator reaction-diffusion system for `ite` explicit
    Euler steps on an x-by-y grid and return the per-step history
    (list of prey grids, list of predator grids).
    """
    # Initial fields.
    n = generate_preys(x, y, epsilon, n_start)
    p = generate_predators(x, y, p_start)
    history_n = []
    history_p = []
    # Relative offsets of the four von Neumann neighbours.
    offsets = [[-1, 0], [1, 0], [0, 1], [0, -1]]
    t_start = time.time()
    for _ in range(ite):
        # Snapshot the current step; every cell update below reads from
        # these snapshots so the step is order-independent.
        prev_n = copy.copy(n)
        prev_p = copy.copy(p)
        history_n.append(prev_n)
        history_p.append(prev_p)
        for i in range(x):
            for j in range(y):
                nl = get_laplacian(prev_n, i, j, x, y, offsets, h, True)
                pl = get_laplacian(prev_p, i, j, x, y, offsets, h, False)
                n0 = prev_n[i][j]
                p0 = prev_p[i][j]
                n[i][j] = next_n(n0, p0, nl, pl, d11, d12, d21, d22,
                                 t, r, e, beta, k, ni, w, b)
                p[i][j] = next_p(n0, p0, nl, pl, d11, d12, d21, d22,
                                 t, r, e, beta, k, ni, w, b)
    t_end = time.time()
    print(t_end - t_start)
    return history_n, history_p
# Demo run of the scalar (non-vectorized) version: 50x50 grid, 10 steps.
ns,ps = generate_system(x=50,y=50,h=0.25,d11=0.01,d12=0.0115,d21=0.01,d22=1,
                        t=0.01,r=0.5,e=1,beta=0.6,k=2.6,ni=0.25,w=0.4,b=0.3154,
                        ite=10,n_start=0.43058,p_start=0.718555,epsilon=0.001)
在200x200的网格上,预期的输出将在几分钟内计算出1百万次迭代,而在40x40的网格上,10,000个迭代仅需要230秒
编辑
我设法对整个程序进行了矢量化处理。性能提升了400倍。哇
这是新代码:
import numpy as np
import copy
import time
def next_n(n,p,nl,pl,d11,d12,d21,d22,t,r,e,beta,k,ni,w,b):
    """
    Euler step of the prey equation (works element-wise on arrays):
    logistic growth minus Beddington-DeAngelis predation plus diffusion.
    d21/d22/e/ni are unused; the signature is shared with next_p.
    """
    rate = (r * (1 - n / k) * n
            - beta * n / (b + n + w * p) * p
            + d11 * nl + d12 * pl)
    return n + t * rate
def next_p(n,p,nl,pl,d11,d12,d21,d22,t,r,e,beta,k,ni,w,b):
    """
    Euler step of the predator equation (works element-wise on arrays):
    conversion of predation minus mortality plus diffusion.
    d11/d12/r/k are unused; the signature is shared with next_n.
    """
    rate = (e * beta * n / (b + n + w * p) * p
            - ni * p
            + d21 * nl + d22 * pl)
    return p + t * rate
def generate_preys(x,y,epsilon,n_start):
    """
    Initial prey grid: n_start plus uniform noise rescaled into
    [-epsilon/2, epsilon/2], with the one-cell border clamped to
    exactly n_start (the border acts as ghost cells for get_laps).
    """
    noise = np.random.rand(x, y)
    noise = np.interp(noise, (noise.min(), noise.max()),
                      (-epsilon / 2, epsilon / 2))
    grid = noise + n_start
    # Clamp the ghost border to the constant boundary value.
    grid[0, :] = n_start
    grid[-1, :] = n_start
    grid[:, 0] = n_start
    grid[:, -1] = n_start
    return grid
def generate_predators(x,y,p_start):
    """
    Initial predator field: a constant x-by-y array filled with p_start.
    """
    # np.full allocates and fills in one pass; the original built an
    # array of ones and then overwrote every element with fill().
    return np.full((x, y), p_start, dtype=float)
def get_laps(a,x,y,h):
    """
    Vectorized five-point Laplacian of the interior of `a` (the one-cell
    ghost border supplies the boundary values, so the result is (x, y)
    for an (x+2, y+2) input).  x and y themselves are unused; they are
    kept for interface compatibility.
    """
    core = a[1:-1, 1:-1]
    neighbor_sum = (a[1:-1, :-2]   # left
                    + a[1:-1, 2:]  # right
                    + a[:-2, 1:-1] # top
                    + a[2:, 1:-1]) # bottom
    return (neighbor_sum - 4 * core) / (h ** 2)
def generate_system(x,y,h,d11,d12,d21,d22,t,r,e,
                    beta,k,ni,w,b,ite,n_start,p_start,epsilon):
    """
    Run the vectorized prey/predator system for `ite` Euler steps and
    return the per-step history (list of prey grids, predator grids).
    Grids carry a one-cell ghost border, hence the x+2 / y+2 shapes.
    """
    # Initial fields, border cells fixed at the start values.
    n = generate_preys(x + 2, y + 2, epsilon, n_start)
    p = generate_predators(x + 2, y + 2, p_start)
    ps = []
    ns = []
    t1 = time.time()
    for it in range(ite):
        if it % 10000 == 0:
            print(f"iterations passed: {it}")
        # Record each iteration (copies, since n/p are updated in place).
        ns.append(copy.copy(n))
        ps.append(copy.copy(p))
        nl = get_laps(n, x, y, h)
        pl = get_laps(p, x, y, h)
        # BUG FIX: take copies, not views.  n[1:-1,1:-1] is a view, so
        # with a view the predator update below would read the *already
        # updated* prey values instead of this step's values, unlike the
        # original per-element version which read from a snapshot.
        nc = n[1:-1, 1:-1].copy()
        pc = p[1:-1, 1:-1].copy()
        n[1:-1, 1:-1] = next_n(nc, pc, nl, pl, d11, d12, d21, d22,
                               t, r, e, beta, k, ni, w, b)
        p[1:-1, 1:-1] = next_p(nc, pc, nl, pl, d11, d12, d21, d22,
                               t, r, e, beta, k, ni, w, b)
    t2 = time.time()
    print(f"Time taken: {t2-t1}")
    return ns, ps
# Demo run of the vectorized version: 200x200 interior grid, 100 steps.
ns,ps = generate_system(x=200,y=200,h=0.25,d11=0.01,d12=0.0115,d21=0.01,d22=1,
                        t=0.01,r=0.5,e=1,beta=0.6,k=2.6,ni=0.25,w=0.4,b=0.3154,
                        ite=100,n_start=0.43058,p_start=0.718555,epsilon=0.001)