Full charge self-consistency with ABINIT #261

Open
wants to merge 6 commits into base: unstable
3 changes: 2 additions & 1 deletion .github/workflows/build.yml
@@ -108,8 +108,9 @@ jobs:
env:
CC: ${{ matrix.cc }}
CXX: ${{ matrix.cxx }}
TRIQS_BRANCH: ${{ github.event_name == 'pull_request' && github.base_ref || github.ref_name }}
run: |
git clone https://github.com/TRIQS/triqs --branch ${{ github.ref_name }}
git clone https://github.com/TRIQS/triqs --branch $TRIQS_BRANCH
mkdir triqs/build && cd triqs/build
cmake .. -DBuild_Tests=OFF -DCMAKE_INSTALL_PREFIX=$HOME/install
make -j1 install VERBOSE=1
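For readers unfamiliar with the workflow expression above: on a pull_request event, github.base_ref is the PR's target branch (here unstable), so CI now builds against the matching TRIQS branch instead of the PR's own ref; on an ordinary push, the pushed branch name is used. A rough Python sketch of that short-circuit logic, with made-up branch names for illustration:

def resolve_triqs_branch(event_name, base_ref, ref_name):
    # Mirrors ${{ github.event_name == 'pull_request' && github.base_ref || github.ref_name }}:
    # for pull requests use the PR's base branch, otherwise the pushed branch.
    return base_ref if event_name == 'pull_request' else ref_name

# On a PR targeting 'unstable', TRIQS 'unstable' is cloned regardless of the PR branch name:
assert resolve_triqs_branch('pull_request', 'unstable', 'abinit-csc') == 'unstable'
# On a push, the pushed branch itself is used:
assert resolve_triqs_branch('push', '', 'unstable') == 'unstable'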
35 changes: 32 additions & 3 deletions python/triqs_dft_tools/converters/wannier90.py
@@ -162,6 +162,7 @@ def convert_dft_input(self):
shells_map = [inequiv_to_corr[corr_to_inequiv[icrsh]]
for icrsh in range(n_corr_shells)]
mpi.report('Mapping: {}'.format(shells_map))
mpi.report('\n')

# Second, let's read the file containing the Hamiltonian in WF basis
# produced by Wannier90
@@ -311,6 +312,7 @@ def convert_dft_input(self):
#used for certain routines within dft_tools if treating the inputs differently is required.
dft_code = 'w90'

mpi.report("\nSaving into HDF file.")
# Finally, save all required data into the HDF archive:
if mpi.is_master_node():
with HDFArchive(self.hdf_file, 'a') as archive:
@@ -928,18 +930,22 @@ def read_misc_input(w90_seed, n_spin_blocks, n_k):
nnkp_filename = w90_seed + '.nnkp'
locproj_filename = os.path.join(w90_seed_dir, 'LOCPROJ')
outcar_filename = os.path.join(w90_seed_dir, 'OUTCAR')
abiout_filename = w90_seed + '.abinit'

if os.path.isfile(nscf_filename):
read_from = 'qe'
mpi.report('Reading DFT band occupations from Quantum Espresso output {}'.format(nscf_filename))
elif os.path.isfile(locproj_filename) and os.path.isfile(outcar_filename):
read_from = 'vasp'
mpi.report('Reading DFT band occupations from Vasp output {}'.format(locproj_filename))
elif os.path.isfile(abiout_filename):
read_from = 'abinit'
mpi.report('Reading DFT band occupations from ABINIT output {}'.format(abiout_filename))
else:
raise IOError('seedname.nscf.out or LOCPROJ and OUTCAR required in bloch_basis mode')
raise IOError('%s, LOCPROJ and OUTCAR, or %s required in bloch_basis mode' % (nscf_filename, abiout_filename))

assert n_spin_blocks == 1, 'spin-polarized not implemented'
assert read_from in ('qe', 'vasp')
assert read_from in ('qe', 'vasp', 'abinit')

occupations = []
reading_kpt_basis = False
@@ -981,7 +987,7 @@ def read_misc_input(w90_seed, n_spin_blocks, n_k):
occs = k_block[int(len(k_block)/2)+1:]
flattened_occs = [float(item) for sublist in occs for item in sublist]
occupations.append(flattened_occs)
else:
elif read_from == "vasp":
# Reads LOCPROJ
with open(locproj_filename, 'r') as file:
header = file.readline()
@@ -1001,6 +1007,29 @@ def read_misc_input(w90_seed, n_spin_blocks, n_k):
lines_read_kpt_basis += 1
if lines_read_kpt_basis == 3:
break
elif read_from == "abinit":
# Reads the ABINIT output file '<seed>.abinit' produced for the charge
# self-consistency interface: Fermi energy, number of k-points and bands,
# followed by one block of band occupations per k-point.
occupations = []
with open(abiout_filename, 'r') as out_file:
out_data = out_file.readlines()
size_block = 0
for l, line in enumerate(out_data):
if 'Fermie' in line:
# ABINIT writes the Fermi energy in Hartree; convert to eV
fermi_energy = float(line.split()[-1])*27.2114079527
print("Fermi energy: ", fermi_energy)
elif "Nkpt" in line:
n_kpt = int(line.split()[-1])
print("Nkpt: ", n_kpt)
elif "Nband" in line:
n_ks = int(line.split()[-1])
print("Nband: ", n_ks)
# occupations are written 12 values per line, so each k-point
# block spans ceil(Nband/12) lines
size_block = int(np.ceil(n_ks/12))
print("Lines per k-point block: ", size_block)

if "k-point " in line:
k_block = [line2.split() for line2 in out_data[l+1:l+1+size_block]]
occs = k_block
# halve the ABINIT occupations (0..2 for unpolarized runs) to get
# per-spin weights between 0 and 1
flattened_occs = [float(item)/2 for sublist in occs for item in sublist]
occupations.append(flattened_occs)

# assume that all bands contribute, then remove from exclude_bands; python indexing
corr_bands = list(range(n_ks))
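As a companion to the new ABINIT branch of read_misc_input above, here is a minimal, self-contained sketch of the occupation-file layout the parser assumes, with invented numbers; only the keywords ('Fermie', 'Nkpt', 'Nband', 'k-point') and the 12-values-per-line block structure are taken from the parser itself:

import numpy as np

# Hypothetical example of a '<seed>.abinit' file as the parser expects it:
# Fermi energy in Hartree, k-point and band counts, then one block of
# occupations (12 values per line) per k-point. All numbers are made up.
example = """\
 Fermie  0.12345
 Nkpt  2
 Nband  14
 k-point  1
 2.0 2.0 2.0 2.0 2.0 2.0 2.0 2.0 2.0 2.0 2.0 1.0
 0.5 0.0
 k-point  2
 2.0 2.0 2.0 2.0 2.0 2.0 2.0 2.0 2.0 2.0 2.0 0.8
 0.2 0.0
"""

lines = example.splitlines()
occupations = []
for l, line in enumerate(lines):
    if 'Fermie' in line:
        fermi_energy = float(line.split()[-1]) * 27.2114079527   # Hartree -> eV
    elif 'Nkpt' in line:
        n_kpt = int(line.split()[-1])
    elif 'Nband' in line:
        n_ks = int(line.split()[-1])
        size_block = int(np.ceil(n_ks / 12))                     # lines per k-point block
    if 'k-point ' in line:
        k_block = [l2.split() for l2 in lines[l + 1:l + 1 + size_block]]
        # halve the ABINIT occupations (0..2) to get per-spin weights (0..1)
        occupations.append([float(x) / 2 for row in k_block for x in row])

assert len(occupations) == n_kpt and len(occupations[0]) == n_ks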
40 changes: 31 additions & 9 deletions python/triqs_dft_tools/sumk_dft.py
@@ -222,7 +222,7 @@ def read_input_from_hdf(self, subgrp, things_to_read):
Returns
-------
subgroup_present : boolean
Is the subgrp is present in hdf5 file?
Is the subgrp present in the hdf5 file?
values_not_read : list of strings
List of things that could not be read

@@ -759,15 +759,15 @@ def extract_G_loc(self, mu=None, with_Sigma=True, with_dc=True, broadening=None,
if with_Sigma and hasattr(self, "Sigma_imp"):
mesh = self.Sigma_imp[0].mesh
if mesh != self.mesh:
warn('self.mesh and self.Sigma_imp[0].mesh are differen! Using mesh from Sigma')
warn('self.mesh and self.Sigma_imp[0].mesh are different! Using mesh from Sigma')
elif with_Sigma and not hasattr(self, "Sigma_imp"):
mpi.report('Warning: No Sigma set but parameter with_Sigma=True, calculating Gloc without Sigma.')
with_Sigma = False
mesh = self.mesh
else:
mesh = self.mesh

# create G_loc to be returned in sumk space for all correlated shells. Trafo to solver block structure done later
# create G_loc to be returned in sumk space for all correlated shells. Transformation to solver block structure is done later
G_loc = [self.block_structure.create_gf(ish=ish, mesh=mesh, space='sumk') for ish in range(self.n_corr_shells)]

ikarray = np.array(list(range(self.n_k)))
@@ -2136,7 +2136,7 @@ def calc_density_correction(self, filename=None, dm_type=None, spinave=False, kp
Name of the file to store the charge density correction.
dm_type : string
DFT code to write the density correction for. Options:
'vasp', 'wien2k', 'elk' or 'qe'. Needs to be set for 'qe'
'vasp', 'wien2k', 'elk', 'qe' or 'abinit'. Needs to be set for 'qe' or 'abinit'
spinave : logical
Elk specific and for magnetic calculations in DMFT only.
It averages the spin to keep the DFT part non-magnetic.
@@ -2162,7 +2162,7 @@ def calc_density_correction(self, filename=None, dm_type=None, spinave=False, kp
if dm_type is None:
dm_type = self.dft_code

assert dm_type in ('vasp', 'wien2k','elk', 'qe'), "'dm_type' must be either 'vasp', 'wienk', 'elk' or 'qe'"
assert dm_type in ('vasp', 'wien2k','elk', 'qe', 'abinit'), "'dm_type' must be either 'vasp', 'wien2k', 'elk', 'qe' or 'abinit'"
#default file names
if filename is None:
if dm_type == 'wien2k':
@@ -2173,6 +2173,8 @@ def calc_density_correction(self, filename=None, dm_type=None, spinave=False, kp
filename = 'DMATDMFT.OUT'
elif dm_type == 'qe':
filename = self.hdf_file
elif dm_type == 'abinit':
filename = "abiout.delta_N"


assert isinstance(filename, str), ("calc_density_correction: "
Expand All @@ -2187,7 +2189,7 @@ def calc_density_correction(self, filename=None, dm_type=None, spinave=False, kp
band_en_correction = 0.0

# Fetch Fermi weights and energy window band indices
if dm_type in ['vasp','qe']:
if dm_type in ['vasp','qe', 'abinit']:
fermi_weights = 0
band_window = 0
if mpi.is_master_node():
@@ -2202,7 +2204,6 @@ def calc_density_correction(self, filename=None, dm_type=None, spinave=False, kp
for sp in spn:
dens_mat_dft[sp] = [fermi_weights[ik, ntoi[sp], :].astype(complex) for ik in range(self.n_k)]


# Set up deltaN:
deltaN = {}
for sp in spn:
@@ -2229,7 +2230,7 @@ def calc_density_correction(self, filename=None, dm_type=None, spinave=False, kp
dens[bname] += self.bz_weights[ik] * G_latt[bname].total_density()
else:
dens[bname] += self.bz_weights[ik] * G_latt[bname].total_density(beta)
if dm_type in ['vasp','qe']:
if dm_type in ['vasp', 'qe', 'abinit']:
# In 'vasp'-mode subtract the DFT density matrix
nb = self.n_orbitals[ik, ntoi[bname]]
diag_inds = np.diag_indices(nb)
@@ -2398,13 +2399,34 @@ def calc_density_correction(self, filename=None, dm_type=None, spinave=False, kp
for it in things_to_save:
ar[subgrp][it] = locals()[it]

elif dm_type == 'abinit':
if kpts_to_write is None:
kpts_to_write = np.arange(self.n_k)
else:
assert np.min(kpts_to_write) >= 0 and np.max(kpts_to_write) < self.n_k

assert self.SP == 0, "Spin-polarized density matrix is not implemented"

if mpi.is_master_node():
with open(filename, 'w') as f:
# header: number of k-points written; -1 requests the default number of bands
f.write(" %i -1 ! Number of k-points, default number of bands\n"%len(kpts_to_write))
for index, ik in enumerate(kpts_to_write):
# first and last band index of the energy window at this k-point
ib1 = band_window[0][ik, 0]
ib2 = band_window[0][ik, 1]
f.write(" %i %i %i\n"%(index + 1, ib1, ib2))
for inu in range(self.n_orbitals[ik, 0]):
for imu in range(self.n_orbitals[ik, 0]):
# average the two spin components (identical here since SP == 0)
valre = (deltaN['up'][ik][inu, imu].real + deltaN['down'][ik][inu, imu].real) / 2.0
valim = (deltaN['up'][ik][inu, imu].imag + deltaN['down'][ik][inu, imu].imag) / 2.0
f.write(" %.14f %.14f"%(valre, valim))
f.write("\n")

else:
raise NotImplementedError("Unknown density matrix type: '%s'"%(dm_type))

res = deltaN, dens

if dm_type in ['vasp', 'qe']:
if dm_type in ['vasp', 'qe', 'abinit']:
res += (band_en_correction,)

return res
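For reference, a rough sketch of how the density-correction file written above for dm_type='abinit' (default name abiout.delta_N) could be read back: a header line with the number of k-points, then per k-point an index line with the band window, followed by the spin-averaged deltaN matrix, one row per line as real/imaginary pairs. The reader below is illustrative and assumes the band-window bounds are inclusive:

import numpy as np

def read_delta_n(filename="abiout.delta_N"):
    """Sketch of a reader for the file written for dm_type='abinit'.
    Layout mirrors the writer: a header line, then per k-point one line
    'index ib1 ib2' and nb x nb rows of 're im' pairs, where nb is the
    number of bands in the window."""
    with open(filename, 'r') as f:
        tokens = f.readline().split()
        n_k = int(tokens[0])                     # first header entry: number of k-points
        band_windows = []
        delta_n = []
        for _ in range(n_k):
            index, ib1, ib2 = (int(x) for x in f.readline().split())
            nb = ib2 - ib1 + 1                   # assumption: window bounds are inclusive
            mat = np.empty((nb, nb), dtype=complex)
            for inu in range(nb):
                row = [float(x) for x in f.readline().split()]
                mat[inu, :] = [complex(row[2*imu], row[2*imu + 1]) for imu in range(nb)]
            band_windows.append((ib1, ib2))
            delta_n.append(mat)
    return band_windows, delta_n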