Commit 86359a34 authored by Sayah-Sel's avatar Sayah-Sel
Browse files

Several epsilon values using finufft

parent c0fc42a5
import multiprocessing as mp
import os
import time
import warnings

import finufft
import jax.numpy as jnp
import numpy as np
import pandas as pd
import scipy.spatial as sp
from numpy import fft as f
os.environ['KMP_DUPLICATE_LIB_OK']='True'
d=2
L=np.pi
N=32
dx=L/N
mu=0
corr_L=["norm1","norm2"]
corr="norm2"
l_cV=0.2*np.pi
l_cA=0.2*np.pi
#e_L=[0.01,0.5,0.9,0.99]
e_L=[0.01,0.1,0.2,0.3,0.5,0.7,0.8,0.9]#0.99
#e_L=[0.1,0.3,0.5,0.7]
XI=[]
for k in range(d):
XI.append(f.fftfreq(2*N)*2*N*2*np.pi/(2*L))
xi=np.array(np.meshgrid(*XI,indexing='ij'))
X= np.mgrid[tuple(slice(- L, L, dx) for _ in range(d))].T
Y=X.reshape((-1,d))
I=np.ones_like(Y.sum(axis=-1))
if corr=="norm2":
p=2
elif corr=="norm1":
p=1
AX=[-k for k in range(1,d+1)]
samples=24 #several disorder per value of epsilon to reduce the error
th=24
def make_V(size=1,d=d,l_c=l_cV,xi=xi,N=N,corr=corr,nb=len(e_L)):
if corr=="norm2":
k2=(xi**2).sum(axis=0)
covVH=(2*np.pi*l_c**2)**(d/2)*np.exp(-l_c**2*k2/4)
elif corr=="norm1":
truc=2*l_c/(1+(l_c*xi)**2)
covVH=2*truc.prod(axis=0)
if size>1:
list_Vh=[]
for k in range(size):
Vh=covVH*(np.random.standard_normal(covVH.shape)+1j*np.random.standard_normal(covVH.shape))
list_Vh.append(Vh)
list_Vh=np.array(list_Vh)
return list_Vh
else:
Vh=covVH*(np.random.standard_normal(covVH.shape)+1j*np.random.standard_normal(covVH.shape))
return Vh
def make_A(size=1,d=d,l_c=l_cA,xi=xi,N=N,corr=corr,nb=len(e_L)):
Ah=np.zeros((d,d)+xi[0].shape,dtype=np.complex128)
if corr=="norm2":
k2=(xi**2).sum(axis=0)
covAH=(2*np.pi*l_c**2)**(d/2)*np.exp(-l_c**2*k2/4)
elif corr=="norm1":
truc=2*l_c/(1+(l_c*xi)**2)
covAH=2*truc.prod(axis=0)
if size>1:
list_Ah=[]
for _ in range(size):
for i in range(1,d):
for j in range(i):
Ah[i,j,:]=covAH*(np.random.standard_normal(covAH.shape)+1j*np.random.standard_normal(covAH.shape))
Ah[j,i,:]=-Ah[i,j,:]
list_Ah.append(Ah)
list_Ah=np.array(list_Ah)
return list_Ah
else:
for i in range(1,d):
for j in range(i):
Ah[i,j,:]=covAH*(np.random.standard_normal(covAH.shape)+1j*np.random.standard_normal(covAH.shape))
Ah[j,i,:]=-Ah[i,j,:]
return Ah
Vhl=make_V(samples*len(e_L))
Ahl=make_A(samples*len(e_L))
eL=np.array([np.ones(samples)*e for e in e_L]).flatten()
#finufft is a library based on fftw so we first implement the (non-uniform) fft plan that wil be executed to evaluate the forces
plan=finufft.Plan(nufft_type=2,n_modes_or_dim=(2*N,)*d,modeord=1,eps=1e-6,isign=1,n_trans=d,nthreads=1,fftw=64)
Nt=200
Time=int(10**3)
dt=2*10**-3
def mean_on_disorder(Vh,Ah,e,plan=plan,Nt=Nt,Time=Time,dt=dt):
warnings.filterwarnings("ignore")
v=1-e
a=e
nabVh=v*np.array([1j*xi[k]*Vh for k in range(d)])
rotAh=a*np.array([np.array([1j*xi[j]*Ah[i,j,:] for j in range(d)]).sum(axis=0) for i in range(d)])/np.sqrt(d-1)
fh=-nabVh+rotAh
np.random.seed() #important, otherwise all the process will have the same random seed and so will generate the same starting points
T=0.0
threshold=dt*(T+0.1)
def run(Time=Time,d=d,dt=dt,T=T,threshold=threshold,plan=plan):
x=np.random.uniform(-L,L,size=(d,1))
points=np.empty((d,Time))
fixe=0
force=np.empty((d,1))
for k in range(Time):
points[:,k]=x[:,0]
plan.setpts(*x) #the current position is set as "non-uniform point" of the finufft plan
plan.execute(fh,out=force) #it is then executed such that the ouput is stored in force
u=force #+np.random.standard_normal(size=(d,1))*T
x_new=x+dt*u
x=(L+x_new)%(2*L)-L
if k>50 and np.std(points[:,k-20:k],axis=1).sum()<threshold: #stop condition: stop trajectories which have converged to something
fixe=1 #if the trajectory found a fixed point
return x.T[0],fixe,k
# TODO: add another stop condition to tackle the case where the trajectory is obviously not converging to anything
return x.T[0],fixe,k
def dist(Nt,d=d,Time=Time,dt=dt,T=T,mu=mu):
points=np.empty((Nt,d))
fixed=np.empty((Nt,))
nb_it=np.empty((Nt,))
for k in range(Nt):
points[k],fixed[k],nb_it[k]=run()
return points, fixed, nb_it
points,fixed,K=dist(Nt)
points_eq=points[fixed==1]
if len(points_eq)==0:
return [0,e,Time]
hi=np.histogramdd(points_eq,bins=40) #the unique are count with an appropriate binning using numpy histograms, not absolutely necessary at T=0, but it is for non-vanishing temperature
nb=hi[0]
nb_eq=len(nb[nb>0])
return [nb_eq,e,K[fixed==1].mean()]
nb_eq_moy=[]
nb_eq_std=[]
nb_eq_time=[]
t1=time.time()
if __name__=='__main__':
P=mp.Pool(th)
argg=[(Vhl[k],Ahl[k],eL[k]) for k in range(samples*len(e_L))]
out=P.starmap_async(mean_on_disorder,argg) #the computations are parallelized on disorders: each process/thread take one disorder at a time
P.close()
P.join()
nb_eq,e_out,K_max=np.array(out.get()).T
t2=time.time()
print(t2-t1)
for e in e_L:
nb_eq_moy.append(nb_eq[e_out==e].mean())
nb_eq_std.append(nb_eq[e_out==e].std())
nb_eq_time.append(K_max[(e_out==e) & (K_max!=None)].mean())
result=pd.DataFrame(data=[nb_eq_moy,nb_eq_std,nb_eq_time],index=e_L,columns=["mean eq","std eq", "mean time"])
\ No newline at end of file
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment