# BergEC-jl/calculations/PMM.py
#%%
import pandas as pd
import torch
import numpy as np
#%%
# Load the two-body spectrum, ordered by coupling strength c, and assemble
# the complex energies E = re_E + i*im_E from the two real columns.
df = pd.read_csv('../temp/2body_data.csv')
df = df.sort_values(by='c')
df['E'] = df['re_E'] + 1j * df['im_E']
# Bound states (re_E < 0) are used for training; the positive-energy
# (resonance) points are the extrapolation targets.
train_data = df[df['re_E'] < 0]
target_data = df[df['re_E'] > 0]
train_cs = train_data['c'].to_numpy()
train_Es = torch.tensor(train_data['E'].to_numpy(), dtype=torch.complex128)
#%%
# hyperparameters
N = 9  # dimension of the learned PMM Hamiltonians
# Initialize two random complex-symmetric N x N matrices as trainable
# parameters; symmetrizing A + A^T guarantees exact (non-Hermitian) symmetry.
def _random_symmetric(n):
    a = torch.randn(n, n, dtype=torch.complex128)
    return (a + torch.transpose(a, 0, 1)).requires_grad_()

H0 = _random_symmetric(N)
H1 = _random_symmetric(N)
#%%
# training
# Build a fine grid of c values for the eigenvalue-tracking sweep by placing
# `subdivisions` equally spaced points in each interval between consecutive
# training couplings (left endpoint included, right excluded), then closing
# the grid with the final training coupling. Every training c therefore
# appears exactly in the grid.
subdivisions = 3
segments = [
    np.linspace(lo, hi, subdivisions, endpoint=False)
    for lo, hi in zip(train_cs[:-1], train_cs[1:])
]
c_steps = np.append(np.concatenate(segments), train_cs[-1])
lr = 0.05       # gradient-descent step size
epochs = 100000  # number of training iterations
for epoch in range(epochs):
    # Predicted energies at the training couplings, filled during the sweep.
    Es = torch.empty(len(train_data), dtype=torch.complex128)
    current_E = 0.0 # start at the threshold
    # Sweep c upward and follow a single eigenvalue branch continuously:
    # at each c, pick the eigenvalue of H(c) = H0 + c*H1 closest to the
    # previously tracked one.
    for c in c_steps:
        H = H0 + c * H1
        evals = torch.linalg.eigvals(H)
        current_E = evals[torch.argmin(torch.abs(evals - current_E))]
        # Float equality is deliberate: c_steps contains the training
        # couplings exactly (they are linspace endpoints / the appended
        # final value), so `c == train_cs` matches without tolerance.
        if np.any(c == train_cs):
            index = np.where(c == train_cs)[0][0]
            # In-place assignment keeps the autograd graph: Es becomes part
            # of the graph through index_put of the tracked eigenvalue.
            Es[index] = current_E
    # Squared-modulus loss between tracked and reference complex energies.
    loss = ((Es - train_Es).abs() ** 2).sum()
    if epoch % 1000 == 0:
        print(f"Training {(epoch+1)/epochs:.1%} \t Loss: {loss}")
    # Manual gradient descent: zero stale grads (if any), backprop, then
    # update the Hamiltonian parameters in place outside autograd.
    if H0.grad is not None:
        H0.grad.zero_()
    if H1.grad is not None:
        H1.grad.zero_()
    loss.backward()
    with torch.no_grad():
        H0 -= lr * H0.grad
        H1 -= lr * H1.grad
# %%
# evaluate for all points
# For every coupling in the dataset, diagonalize the trained H(c) and record
# the eigenvalue nearest to the exact energy at that c.
all_c = torch.tensor(df['c'].values, dtype=torch.float64)
exact_E = torch.tensor(df['E'].values, dtype=torch.complex128)
pred_Es = torch.empty(len(df), dtype=torch.complex128)
with torch.no_grad():
    for row, (c_val, target_E) in enumerate(zip(all_c, exact_E)):
        spectrum = torch.linalg.eigvals(H0 + c_val * H1)
        # TODO: more robust way to identify the eigenvector
        nearest = torch.argmin((spectrum - target_E).abs())
        pred_Es[row] = spectrum[nearest]
# %%
# plot the results
import matplotlib.pyplot as plt
# Complex-energy plane: training points, extrapolation targets, and the
# eigenvalues predicted by the trained PMM Hamiltonians.
for frame, tag in ((train_data, 'training'), (target_data, 'target')):
    plt.scatter(frame['re_E'], frame['im_E'], label=tag)
plt.scatter(pred_Es.real, pred_Es.imag, marker='x', label='predicted')
plt.legend()
# %%