Commit 16e3a7e8 authored by Sayah-Sel's avatar Sayah-Sel
Browse files

generate force with autograd

parent 63318170
......@@ -6,6 +6,8 @@ from numpy import fft as f
from autograd import grad, jacobian
from autograd import elementwise_grad as egrad
##
# parameters
d=2  # number of spatial dimensions; sets the (d, d) shape of the Jacobian in jacobA below
#dx=0.02  # grid spacing — currently unused (kept commented out)
......@@ -24,8 +26,9 @@ l_cA=0.3
alpha_A=1  # stretching exponent of the vector-potential correlation (see Gamma_AX)
alpha_V=1  # stretching exponent of the scalar-potential correlation (see gamma_V_func)
v=1.0  # NOTE(review): immediately overwritten by v=e below — looks like a diff artifact; confirm intent
a=1.0  # NOTE(review): immediately overwritten by a=1-e below — looks like a diff artifact; confirm intent
e=0.2  # mixing weight splitting amplitude between the scalar (v) and vector (a) fields
v=e    # amplitude of the scalar-potential correlation (enters as v**2)
a=1-e  # amplitude of the vector-potential correlation (enters as a**2)
def gamma_V_func(r):
    """Scalar-potential correlation at separation r.

    Returns v**2 * exp(-(r/l_cV)**(2*alpha_V) / 2), i.e. a stretched
    Gaussian with correlation length l_cV and stretching exponent alpha_V.
    """
    stretched = (r / l_cV) ** (2 * alpha_V)
    return v * v * np.exp(-0.5 * stretched)
......@@ -84,9 +87,37 @@ def Gamma_AX(x,Y):
r=npa.sqrt(((Y-x)**2).sum(axis=-1))
return a**2*npa.exp(-(r/l_cA)**alpha_A)
#potential and vector potential functions ready to use
def V_func(x,Y=Y):
    """Scalar potential at point x.

    Contracts the correlation vector Gamma_VX(x, Y) with the precomputed
    coefficient vector KV. Y defaults to the module-level sample points.
    """
    correlations = Gamma_VX(x, Y)
    return npa.dot(correlations, KV)
def A_func(x,Y=Y):
    """Vector potential at point x.

    Contracts axis 1 of the correlation tensor Gamma_AX(x, Y) with axis 2
    of the precomputed coefficient tensor KA. Y defaults to the
    module-level sample points.
    """
    correlations = Gamma_AX(x, Y)
    return npa.tensordot(correlations, KA, axes=[[1], [2]])
## generate force
# Elementwise gradient of the scalar potential with respect to x.
nablaV=egrad(V_func,argnum=0)
# One elementwise-gradient function per independent component A[i, j]
# (strictly lower triangle, i > j) of the antisymmetric vector potential.
coordA={}
for i in range(1,d):
    for j in range(i):
        # BUGFIX: bind i and j as default arguments. Python closures are
        # late-binding, so without `i=i, j=j` every entry of coordA would
        # differentiate the component selected by the FINAL loop values.
        # Harmless at d=2 (single iteration) but wrong for any d > 2.
        def A_coord(x, Y=Y, i=i, j=j):
            """Component (i, j) of the vector potential at x."""
            AT = npa.tensordot(Gamma_AX(x, Y), KA, [[1], [2]]).T
            return AT[i, j, :]
        coordA[(i,j)]=egrad(A_coord)
def jacobA(x,coordA=coordA):
    """Assemble the antisymmetric (d, d) Jacobian of the vector potential at x.

    Each strictly-lower-triangle entry (row, col) is filled from the
    corresponding gradient function in coordA; the upper triangle is the
    negated mirror, and the diagonal stays zero.
    """
    JA = np.zeros((d, d) + x.shape)
    for row in range(1, d):
        for col in range(row):
            grad_val = coordA[(row, col)](x)
            JA[row, col, :] = grad_val
            JA[col, row, :] = -JA[row, col, :]
    return JA
def rotA(x):
    """Curl-like force term: contract the scaled Jacobian of A at x.

    Scales jacobA(x) by 1/sqrt(2), then sums index j of entry (i, j)
    against the trailing coordinate axis via einsum, yielding one value
    per spatial direction i.
    """
    scaled = jacobA(x) / np.sqrt(2)
    return np.einsum('ij...j -> ...i', scaled)
Supports Markdown
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment