Fixed issue saving nodes locations in MPI

aasensio committed Apr 23, 2024
1 parent a32bcbe · commit e4ca64d
Showing 8 changed files with 90 additions and 79 deletions.
4 changes: 2 additions & 2 deletions .github/workflows/build_test.yml
@@ -43,5 +43,5 @@ jobs:
github-token: ${{ secrets.GITHUB_TOKEN }}
path-to-lcov: coverage.lcov

- name: Build wheels
uses: pypa/cibuildwheel@v2.16.0
# - name: Build wheels
# uses: pypa/cibuildwheel@v2.16.0
34 changes: 30 additions & 4 deletions docs/started/installation.rst
@@ -22,15 +22,15 @@ For a Linux OS, type:

::

conda create -n hazel_env python=3.6
conda create -n hazel_env python=3.10
conda activate hazel_env
conda install -c conda-forge cython numpy h5py tqdm scipy astropy mpich mpi4py configobj gfortran_linux-64 gcc_linux-64 asciitree

For Mac OS, type:
A different version of Python can be installed in the environment. For Mac OS, type:

::

conda create -n hazel_env python=3.6
conda create -n hazel_env python=3.10
conda activate hazel_env
conda install -c conda-forge cython numpy h5py tqdm scipy astropy mpich mpi4py configobj gfortran_osx-64 gcc_osx-64 asciitree

@@ -43,7 +43,7 @@ If you also want to use the GUI, you need to add two new dependencies:

::

conda create -n hazel_env python=3.6
conda create -n hazel_env python=3.10
conda activate hazel_env
conda install -c conda-forge cython numpy h5py tqdm scipy astropy mpich mpi4py configobj gfortran_linux-64 gcc_linux-64 matplotlib pyqt asciitree

@@ -53,6 +53,14 @@ Remember to add `ipython` if you are using this shell to run |hazel2|. Otherwise
If you want to use the NLTE neural option for synthesizing the Ca II 854.2 nm line, the packages ``pytorch``,
``pytorch_geometric`` and ``pytorch-scatter`` should be installed. Take a look at the documentation for `PyTorch <https://pytorch.org/>`_,
`PyTorch Geometric <https://pytorch-geometric.readthedocs.io/en/latest/>`_ and `PyTorch Scatter <https://github.com/rusty1s/pytorch_scatter>`_ for more information.
A typical installation using `pip` can be done with:

::

    pip install torch torchvision torchaudio --index-url https://download.pytorch.org/whl/cpu
    pip install torch_geometric
    pip install pyg_lib torch_scatter torch_sparse torch_cluster torch_spline_conv -f https://data.pyg.org/whl/torch-2.2.0+cpu
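
A quick way to verify that the NLTE dependencies are importable is a short Python check (a minimal sketch, not part of the distribution; it only assumes the package names installed by the commands above):

::

    # Hedged sanity check: confirm the NLTE dependencies can be imported
    import torch
    import torch_geometric
    import torch_scatter

    print(torch.__version__, torch_geometric.__version__)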


Installation from source
------------------------
@@ -129,6 +137,24 @@ All of them can be installed in Anaconda with:

conda install numpy h5py scipy astropy mpi4py configobj tqdm pyqt matplotlib

Container
---------

A simple way of installing |hazel2|, at least on Linux systems or supercomputers, is to use
this Apptainer `container <https://cloud.iac.es/index.php/s/BqG2jGRsqHtJMW9>`_.
This container has all the dependencies installed and is ready to be used. You can download it (we try to
keep this version as updated as possible) and run it with Apptainer as:

::

    apptainer run hazel.sif python invert.py
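
The ``invert.py`` script referenced above is not shipped with this commit; a minimal sketch of such a driver (assuming the ``hazel.Iterator``/``hazel.Model`` interface documented for |hazel2| and a hypothetical ``conf.ini``) could look like:

::

    # Hedged sketch of an MPI-capable inversion driver; file and option names are assumptions
    import hazel

    iterator = hazel.Iterator(use_mpi=True)
    mod = hazel.Model('conf.ini', working_mode='inversion', verbose=2, rank=iterator.get_rank())
    iterator.use_model(model=mod)
    iterator.run_all_pixels()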

In principle, you can build your own container by going to the `apptainer` directory in the
distribution and running:

::

    apptainer build hazel.sif hazel.def


For developers
--------------
Remember that if you want to be involved in the development of the code, it is perhaps more
24 changes: 5 additions & 19 deletions examples/configurations/conf_mpi_invh5.ini
@@ -33,13 +33,13 @@ Number of cycles = 1
Bx = -1000.0, 1000.0
By = -1000.0, 1000.0
Bz = -1000.0, 1000.0
ff = 0.0, 1.0
ff = 0.0, 1.0001
vmac = 0.0, 5.0

[[[Nodes]]]
T = 2, 3, 5, 5
vmic = 0, 1, 1, 1
v = 0, 1, 1, 1
vmic = 1, 1, 1, 1
v = 1, 1, 1, 1
Bx = 0, 1, 1, 1
By = 0, 1, 1, 1
Bz = 0, 1, 1, 1
@@ -72,7 +72,7 @@ Number of cycles = 1
deltav = 3.0, 12.0
beta = 1.0, 2.0
a = 0.0, 1.0
ff = 0.0, 1.0
ff = 0.0, 1.0001


[[[Nodes]]]
@@ -105,18 +105,4 @@ Number of cycles = 1
Sigma = 0, 0, 0, 0
Depth = 0, 0, 0, 0
a = 0, 0, 0, 0
ff = 0, 0, 0, 0

[[Straylight 1]]
Name = st1
Spectral region = spec1
Wavelength = 10826, 10833
Reference atmospheric model = 'straylight/model_stray.1d' # File with model parameters

[[[Ranges]]]
v = -1.0, 1.0
ff = 0.0, 1.0

[[[Nodes]]]
v = 0, 0, 0, 0
ff = 0, 0, 0, 0
ff = 0, 0, 0, 0
6 changes: 3 additions & 3 deletions hazel/forward_nn.py
@@ -141,6 +141,8 @@ def __init__(self, gpu=0, checkpoint=None, readir=None, verbose=0):
self.predict_model = graphnet.EncodeProcessDecode(**self.hyperparameters).to(self.device)
self.predict_model.load_state_dict(checkpoint['state_dict'])

self.predict_model.eval()

if (verbose >= 1):
npars = sum(p.numel() for p in self.predict_model.parameters() if p.requires_grad)
tmp = self.checkpoint.split('/')
@@ -151,9 +153,7 @@ def predict(self, tau_all, ne_all, vturb_all, T_all, vlos_all):
dset = Dataset(self.hyperparameters, tau_all, ne_all, vturb_all, T_all, vlos_all)

self.loader = torch_geometric.loader.DataLoader(dset, batch_size=1, shuffle=False)

self.predict_model.eval()


self.pred_out = []

with torch.no_grad():
13 changes: 7 additions & 6 deletions hazel/io.py
@@ -217,12 +217,13 @@ def write(self, model, pixel=0, randomization=0):
else:
self.out_model[k][k2][pixel,randomization,...] = v2

# Model node positions
for k2, v2 in v.nodes_logtau_cycle[cycle].items():
if (model.save_all_cycles):
self.out_nodes[k][k2][pixel,randomization,cycle] = np.atleast_1d(v2)
else:
self.out_nodes[k][k2][pixel,randomization] = np.atleast_1d(v2)
# # Model node positions
# CHECK!!!!!!!!!!!!!!!!!
# for k2, v2 in v.nodes_logtau_cycle[cycle].items():
# if (model.save_all_cycles):
# self.out_nodes[k][k2][pixel,randomization,cycle] = np.atleast_1d(v2)
# else:
# self.out_nodes[k][k2][pixel,randomization] = np.atleast_1d(v2)

# Model parameter errors
for k2, v2 in v.error_cycle[cycle].items():
15 changes: 8 additions & 7 deletions hazel/model.py
@@ -1749,7 +1749,7 @@ def compute_chi2(self, only_chi2=False, weights=None):
if (v.interpolate_to_lr):
residual = (v.stokes_lr - v.obs)
else:
residual = (v.stokes - v.obs)
residual = (v.stokes - v.obs)

# Do not use weights. This is used for the computation of errors
# if (weights is None):
@@ -1993,7 +1993,7 @@ def invert(self, randomize=False, randomization_ind=None):
"""

first = True
first = True

# Reset reference model to the one loaded from the file
for k, v in self.atmospheres.items():
@@ -2065,10 +2065,10 @@
keepon = True
iteration = 0

# Main Levenberg-Marquardt algorithm
# Main Levenberg-Marquardt algorithm
self.synthesize_and_compute_rf(compute_rf=True)
chi2, dchi2, ddchi2 = self.compute_chi2()

chi2, dchi2, ddchi2 = self.compute_chi2()
while keepon:

# Simple parabolic backtracking
@@ -2125,7 +2125,7 @@

self.synthesize_and_compute_rf(compute_rf=True)

chi2, dchi2, ddchi2 = self.compute_chi2()
chi2, dchi2, ddchi2 = self.compute_chi2()

rel = 2.0 * (chi2 - bestchi2) / (chi2 + bestchi2)

@@ -2148,7 +2148,7 @@
if (self.verbose >= 2):
self.logger.info('==============================================================================')
self.logger.info('It: {0} - chi2: {1:10.6f} - lambda_opt: {2:10.6f} - rel: {3:10.6f}'.format(iteration, chi2, lambda_opt, np.abs(rel)))
self.logger.info('==============================================================================')
self.logger.info('==============================================================================')

# Increase the optimal by 100 to find again the optimal value
lambdaLM = 100.0 * lambda_opt
@@ -2162,6 +2162,7 @@

self.set_new_model(self.nodes)


# Calculate final chi2
# self.synthesize_and_compute_rf()
# chi2 = self.compute_chi2(only_chi2=True)
56 changes: 28 additions & 28 deletions hazel/multiprocess.py
Expand Up @@ -362,42 +362,42 @@ def mpi_workers_work(self):
data_to_send[label] = {}

# Try to do the inversion
try:
self.model.invert(randomize=randomize, randomization_ind=loop)
data_to_send['error'] = 0
for k, v in self.model.spectrum.items():
if (v.interpolate_to_lr):
data_to_send[label][k] = copy.deepcopy([self.model.spectrum[k].stokes_cycle, self.model.spectrum[k].chi2_cycle, self.model.spectrum[k].bic_cycle, self.model.spectrum[k].aic_cycle, self.model.spectrum[k].stokes_lr_cycle])
else:
data_to_send[label][k] = copy.deepcopy([self.model.spectrum[k].stokes_cycle, self.model.spectrum[k].chi2_cycle, self.model.spectrum[k].bic_cycle, self.model.spectrum[k].aic_cycle])
# try:
self.model.invert(randomize=randomize, randomization_ind=loop)
data_to_send['error'] = 0
for k, v in self.model.spectrum.items():
if (v.interpolate_to_lr):
data_to_send[label][k] = copy.deepcopy([self.model.spectrum[k].stokes_cycle, self.model.spectrum[k].chi2_cycle, self.model.spectrum[k].bic_cycle, self.model.spectrum[k].aic_cycle, self.model.spectrum[k].stokes_lr_cycle])
else:
data_to_send[label][k] = copy.deepcopy([self.model.spectrum[k].stokes_cycle, self.model.spectrum[k].chi2_cycle, self.model.spectrum[k].bic_cycle, self.model.spectrum[k].aic_cycle])

for k, v in self.model.atmospheres.items():
data_to_send[label][k] = copy.deepcopy([v.reference_cycle, v.error_cycle, v.nodes_location_cycle])
for k, v in self.model.atmospheres.items():
data_to_send[label][k] = copy.deepcopy([v.reference_cycle, v.error_cycle, v.nodes_logtau_cycle])

# If a numerical problem appeared, send the error code to the parent
except NumericalErrorHazel:
data_to_send['error'] = 1
for k, v in self.model.spectrum.items():
data_to_send[label][k] = None
# except NumericalErrorHazel:
# data_to_send['error'] = 1
# for k, v in self.model.spectrum.items():
# data_to_send[label][k] = None

for k, v in self.model.atmospheres.items():
data_to_send[label][k] = None
# for k, v in self.model.atmospheres.items():
# data_to_send[label][k] = None

except NumericalErrorSIR:
data_to_send['error'] = 2
for k, v in self.model.spectrum.items():
data_to_send[label][k] = None
# except NumericalErrorSIR:
# data_to_send['error'] = 2
# for k, v in self.model.spectrum.items():
# data_to_send[label][k] = None

for k, v in self.model.atmospheres.items():
data_to_send[label][k] = None
# for k, v in self.model.atmospheres.items():
# data_to_send[label][k] = None

except:
data_to_send['error'] = 3
for k, v in self.model.spectrum.items():
data_to_send[label][k] = None
# except:
# data_to_send['error'] = 3
# for k, v in self.model.spectrum.items():
# data_to_send[label][k] = None

for k, v in self.model.atmospheres.items():
data_to_send[label][k] = None
# for k, v in self.model.atmospheres.items():
# data_to_send[label][k] = None

else:
for k, v in self.model.atmospheres.items():
17 changes: 7 additions & 10 deletions hazel/photosphere.py
@@ -243,11 +243,9 @@ def interpolate_nodes(self, log_tau, reference, nodes, nodes_location):
f = interp.interp1d(log_tau_pos, nodes, 'quadratic', bounds_error=False, fill_value='extrapolate')
return reference + f(log_tau), pos

if (n_nodes > 3):
if (np.all(np.diff(log_tau_pos) < 0)):
log_tau_pos_temp = log_tau_pos[::-1]
nodes_temp = nodes[::-1]
f = interp.PchipInterpolator(log_tau_pos_temp, nodes_temp, extrapolate=True)
if (n_nodes > 3):
if (np.all(np.diff(log_tau_pos) < 0)):
f = interp.PchipInterpolator(log_tau_pos[::-1], nodes[::-1], extrapolate=True)
else:
f = interp.PchipInterpolator(log_tau_pos, nodes, extrapolate=True)
return reference + f(log_tau), pos
@@ -478,7 +476,7 @@ def nodes_to_model(self):
"""
for k, v in self.nodes.items():
if (self.n_nodes[k] > 0):
self.parameters[k], self.nodes_index[k] = self.interpolate_nodes(self.log_tau, self.reference[k], self.nodes[k], self.nodes_logtau[k])
self.parameters[k], self.nodes_index[k] = self.interpolate_nodes(self.log_tau, self.reference[k], self.nodes[k], self.nodes_logtau[k])
else:
self.parameters[k] = self.reference[k]

@@ -666,11 +664,10 @@ def synthesize(self, stokes_in, returnRF=False, nlte=False):
self.parameters['Bz'])

# Check if the line is 8542 and we want NLTE. If that is the case, then evaluate the
# neural network to return the departure coefficients

# neural network to return the departure coefficients
if (nlte):
if (self.nlte):
dif = (self.parameters['T'] - self.t_old)
dif = (self.parameters['T'] - self.t_old)
if (np.max(dif) > self.t_change_departure):
for i, l in enumerate(self.lines):
if (l == 301):
@@ -685,7 +682,7 @@
vlos = [self.parameters['v'][::-1] * 1e3] # in m/s
prediction = self.graphnet_nlte.predict(tau, ne, vturb, tt, vlos)
self.departure[0, i, :] = 10.0**prediction[0][::-1, 2]
self.departure[1, i, :] = 10.0**prediction[0][::-1, 4]
self.departure[1, i, :] = 10.0**prediction[0][::-1, 4]

self.t_old = self.parameters['T']

