🔳 l2hmc-qcd Example: 4D SU(3)

Tags: LQCD, MCMC, HMC, SU3, l2hmc

Author: Sam Foreman
Published: July 24, 2025
Modified: August 22, 2025

# %matplotlib inline
import matplotlib_inline
matplotlib_inline.backend_inline.set_matplotlib_formats('svg')
import os
os.environ['COLORTERM'] = 'truecolor'
import lovely_tensors as lt
lt.monkey_patch()
lt.set_config(color=False)
# automatically detect and reload local changes to modules
%load_ext autoreload
%autoreload 2
import ezpz
import numpy as np
import matplotlib.pyplot as plt
from l2hmc.utils.plot_helpers import FigAxes
import ambivalent
plt.style.use(ambivalent.STYLES['ambivalent'])
#set_plot_style()
Using device: cpu
import ezpz
from pathlib import Path
from typing import Optional
from rich import print

import lovely_tensors as lt
import matplotlib.pyplot as plt
import numpy as np
import torch
import yaml

# from l2hmc.utils.dist import setup_torch
seed = np.random.randint(2 ** 32)
print(f"seed: {seed}")

_ = ezpz.setup_torch(seed=seed)
torch.set_default_dtype(torch.float64)
# _ = setup_torch(precision='float64', backend='DDP', seed=seed, port='1234')

logger = ezpz.get_logger()

import l2hmc.group.su3.pytorch.group as g
# from l2hmc.utils.rich import get_console
from l2hmc.common import grab_tensor, print_dict
from l2hmc.configs import dict_to_list_of_overrides, get_experiment
from l2hmc.experiment.pytorch.experiment import Experiment, evaluate  # noqa
seed: 1681930688
[2025-07-24 16:56:35][I][ezpz/dist:558] Using get_torch_device_type()='mps' with backend='gloo'
[2025-07-24 16:56:35][I][ezpz/dist:869] Using device='mps' with backend='DDP' + 'gloo' for distributed training.
[2025-07-24 16:56:35][I][ezpz/dist:919] ['Sams-MacBook-Pro-2.local'][0/0] 
[2025-07-24 16:56:42,158] [INFO] [real_accelerator.py:239:get_accelerator] Setting ds_accelerator to mps (auto detect)
[rank0]:W0724 16:56:43.556000 8749 torch/distributed/elastic/multiprocessing/redirects.py:29] NOTE: Redirects are currently not supported in Windows or MacOs.
from l2hmc.utils.plot_helpers import (  # noqa
    plot_scalar,
    plot_chains,
    plot_leapfrogs
)

def savefig(fig: plt.Figure, fname: str, outdir: os.PathLike):
    pngfile = Path(outdir).joinpath(f"pngs/{fname}.png")
    svgfile = Path(outdir).joinpath(f"svgs/{fname}.svg")
    pngfile.parent.mkdir(exist_ok=True, parents=True)
    svgfile.parent.mkdir(exist_ok=True, parents=True)
    fig.savefig(svgfile, transparent=True, bbox_inches='tight')
    fig.savefig(pngfile, transparent=True, bbox_inches='tight', dpi=300)

def plot_metrics(metrics: dict, title: Optional[str] = None, **kwargs):
    outdir = Path(f"./plots-4dSU3/{title}")
    outdir.mkdir(exist_ok=True, parents=True)
    for key, val in metrics.items():
        fig, ax = plot_metric(val, name=key, **kwargs)
        if title is not None:
            ax.set_title(title)
        logger.info(f"Saving {key} to {outdir}")  # note: 'console' was never defined here; use the ezpz logger
        savefig(fig, f"{key}", outdir=outdir)
        plt.show()

def plot_metric(
        metric: torch.Tensor,
        name: Optional[str] = None,
        **kwargs,
):
    assert len(metric) > 0
    if isinstance(metric[0], (int, float, bool, np.floating)):
        y = np.stack(metric)
        return plot_scalar(y, ylabel=name, **kwargs)
    element_shape = metric[0].shape
    if len(element_shape) == 2:
        y = grab_tensor(torch.stack(metric))
        return plot_leapfrogs(y, ylabel=name)
    if len(element_shape) == 1:
        y = grab_tensor(torch.stack(metric))
        return plot_chains(y, ylabel=name, **kwargs)
    if len(element_shape) == 0:
        y = grab_tensor(torch.stack(metric))
        return plot_scalar(y, ylabel=name, **kwargs)
    raise ValueError(f"Unexpected metric element shape: {element_shape}")
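
For a quick sanity check of the shape-based dispatch above: 0-d entries go to plot_scalar, 1-d (nchains,) entries to plot_chains, and 2-d (nleapfrog, nchains) entries to plot_leapfrogs. A hypothetical smoke test (fabricated histories, not data from this run):

fake_histories = {
    'scalar': [torch.tensor(0.5) for _ in range(10)],    # 0-d -> plot_scalar
    'chains': [torch.rand(8) for _ in range(10)],        # 1-d -> plot_chains
    'leapfrogs': [torch.rand(4, 8) for _ in range(10)],  # 2-d -> plot_leapfrogs
}
for name, hist in fake_histories.items():
    fig, ax = plot_metric(hist, name=name)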

Load config + build Experiment

from rich import print

from l2hmc.configs import CONF_DIR
su3conf = Path(f"{CONF_DIR}/su3test.yaml")
with su3conf.open('r') as stream:
    conf = dict(yaml.safe_load(stream))
import json
from rich import print_json
print_json(json.dumps(conf, indent=4, sort_keys=True))
overrides = dict_to_list_of_overrides(conf)
{
  "annealing_schedule": {
    "beta_final": 6.0,
    "beta_init": 6.0
  },
  "backend": "DDP",
  "conv": "none",
  "dynamics": {
    "eps": 0.01,
    "eps_fixed": false,
    "group": "SU3",
    "latvolume": [
      4,
      4,
      4,
      4
    ],
    "merge_directions": true,
    "nchains": 8,
    "nleapfrog": 4,
    "use_separate_networks": false,
    "use_split_xnets": false,
    "verbose": true
  },
  "framework": "pytorch",
  "init_aim": false,
  "init_wandb": false,
  "learning_rate": {
    "clip_norm": 1.0,
    "lr_init": "1e-04"
  },
  "loss": {
    "aux_weight": 0.0,
    "charge_weight": 0.0,
    "plaq_weight": 0.1,
    "rmse_weight": 0.1,
    "use_mixed_loss": true
  },
  "net_weights": {
    "v": {
      "q": 1.0,
      "s": 1.0,
      "t": 1.0
    },
    "x": {
      "q": 1.0,
      "s": 0.0,
      "t": 1.0
    }
  },
  "network": {
    "activation_fn": "tanh",
    "dropout_prob": 0.0,
    "units": [
      256
    ],
    "use_batch_norm": false
  },
  "restore": false,
  "save": false,
  "steps": {
    "log": 1,
    "nepoch": 10,
    "nera": 1,
    "print": 1,
    "test": 50
  },
  "use_tb": false,
  "use_wandb": false
}
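
Aside: dict_to_list_of_overrides flattens the nested config above into Hydra-style dotted key=value strings (e.g. 'dynamics.group=SU3', 'annealing_schedule.beta_final=6.0') that are applied on top of the base config. A minimal sketch of the idea, using a hypothetical flatten_overrides helper (not the library's implementation):

def flatten_overrides(d: dict, prefix: str = '') -> list[str]:
    # Join nested keys with '.' and render each leaf as 'key=value'
    out = []
    for key, val in d.items():
        dotted = f"{prefix}.{key}" if prefix else str(key)
        if isinstance(val, dict):
            out.extend(flatten_overrides(val, dotted))
        else:
            out.append(f"{dotted}={val}")
    return out

assert 'dynamics.group=SU3' in flatten_overrides(conf)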
ptExpSU3 = get_experiment(overrides=[*overrides], build_networks=True)

# console.print(ptExpSU3.config)
state = ptExpSU3.trainer.dynamics.random_state(6.0)
logger.info(f"checkSU(state.x): {g.checkSU(state.x)}")
logger.info(f"checkSU(state.x): {g.checkSU(g.projectSU(state.x))}")
assert isinstance(state.x, torch.Tensor)
assert isinstance(state.beta, torch.Tensor)
assert isinstance(ptExpSU3, Experiment)
[2025-07-24 16:56:45][I][utils/dist:229:l2hmc.utils.dist] Caught MASTER_PORT:1234 from environment!
[2025-07-24 16:56:45][I][utils/dist:229:l2hmc.utils.dist] Caught MASTER_PORT:1234 from environment!
[2025-07-24 16:56:45][W][pytorch/trainer:470:l2hmc.trainers.pytorch.trainer] Using torch.float32 on cpu!
[2025-07-24 16:56:45][W][pytorch/trainer:470:l2hmc.trainers.pytorch.trainer] Using `torch.optim.Adam` optimizer
[2025-07-24 16:56:45][I][pytorch/trainer:308:l2hmc.trainers.pytorch.trainer] num_params in model: 27880456
[2025-07-24 16:56:46][W][pytorch/trainer:274:l2hmc.trainers.pytorch.trainer] logging with freq 1 for wandb.watch
[2025-07-24 16:56:46][I][ipykernel_8749/1455121896:5:ezpz.log] checkSU(state.x): (tensor[8] f64 x∈[1.054e-14, 3.936e-13] μ=1.177e-13 σ=1.611e-13 [2.090e-14, 1.054e-14, 1.370e-14, 1.959e-14, 5.431e-14, 7.053e-14, 3.936e-13, 3.585e-13], tensor[8] f64 x∈[2.063e-13, 1.259e-11] μ=3.648e-12 σ=5.216e-12 [4.854e-13, 2.314e-13, 2.063e-13, 5.449e-13, 1.509e-12, 2.180e-12, 1.259e-11, 1.143e-11])
[2025-07-24 16:56:46][I][ipykernel_8749/1455121896:6:ezpz.log] checkSU(state.x): (tensor[8] f64 x∈[2.791e-16, 2.884e-16] μ=2.827e-16 σ=3.093e-18 [2.884e-16, 2.833e-16, 2.805e-16, 2.817e-16, 2.825e-16, 2.791e-16, 2.858e-16, 2.803e-16], tensor[8] f64 x∈[8.725e-16, 9.331e-16] μ=9.092e-16 σ=1.988e-17 [9.302e-16, 9.106e-16, 8.725e-16, 9.110e-16, 9.111e-16, 9.149e-16, 9.331e-16, 8.900e-16])
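
Both calls above report deviations from SU(3) at (or near) double-precision roundoff. My reading of g.checkSU is that it returns per-chain measures of how far each link is from being special unitary, i.e. from satisfying U U† = I and det U = 1, with g.projectSU projecting links back onto the group (hence the uniformly ~1e-16 values on the second line). A hedged sketch of that check, assuming x holds (..., 3, 3) complex matrices (an illustration, not the l2hmc source):

def check_su3(x: torch.Tensor) -> tuple[torch.Tensor, torch.Tensor]:
    eye = torch.eye(3, dtype=x.dtype, device=x.device)
    unitarity = (x @ x.adjoint() - eye).norm(dim=(-2, -1))  # ||U U^dagger - I||
    det_dev = (torch.linalg.det(x) - 1.0).abs()             # |det(U) - 1|
    return unitarity, det_dev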
# from l2hmc.utils.plot_helpers import set_plot_style
# set_plot_style()

from l2hmc.common import get_timestamp
TSTAMP = get_timestamp()
OUTPUT_DIR = Path(f"./outputs/pt4dSU3/{TSTAMP}")
HMC_DIR = OUTPUT_DIR.joinpath('hmc')
EVAL_DIR = OUTPUT_DIR.joinpath('eval')
TRAIN_DIR = OUTPUT_DIR.joinpath('train')
HMC_DIR.mkdir(exist_ok=True, parents=True)
EVAL_DIR.mkdir(exist_ok=True, parents=True)
TRAIN_DIR.mkdir(exist_ok=True, parents=True)
ptExpSU3.trainer.print_grads_and_weights()
logger.info(ptExpSU3.config)
#console.print(ptExpSU3.config)
[2025-07-24 16:56:46][I][pytorch/trainer:2003:l2hmc.trainers.pytorch.trainer] --------------------------------------------------------------------------------
[2025-07-24 16:56:46][I][pytorch/trainer:2004:l2hmc.trainers.pytorch.trainer] GRADS:
[2025-07-24 16:56:46][I][l2hmc/common:97] networks.xnet.input_layer.xlayer.weight: None None 
None
networks.xnet.input_layer.xlayer.bias: None None 
None
networks.xnet.input_layer.vlayer.weight: None None 
None
networks.xnet.input_layer.vlayer.bias: None None 
None
networks.xnet.scale.coeff: None None 
None
networks.xnet.scale.layer.weight: None None 
None
networks.xnet.scale.layer.bias: None None 
None
networks.xnet.transf.coeff: None None 
None
networks.xnet.transf.layer.weight: None None 
None
networks.xnet.transf.layer.bias: None None 
None
networks.xnet.transl.weight: None None 
None
networks.xnet.transl.bias: None None 
None
networks.vnet.input_layer.xlayer.weight: None None 
None
networks.vnet.input_layer.xlayer.bias: None None 
None
networks.vnet.input_layer.vlayer.weight: None None 
None
networks.vnet.input_layer.vlayer.bias: None None 
None
networks.vnet.scale.coeff: None None 
None
networks.vnet.scale.layer.weight: None None 
None
networks.vnet.scale.layer.bias: None None 
None
networks.vnet.transf.coeff: None None 
None
networks.vnet.transf.layer.weight: None None 
None
networks.vnet.transf.layer.bias: None None 
None
networks.vnet.transl.weight: None None 
None
networks.vnet.transl.bias: None None 
None
xeps.0: None None 
None
xeps.1: None None 
None
xeps.2: None None 
None
xeps.3: None None 
None
veps.0: None None 
None
veps.1: None None 
None
veps.2: None None 
None
veps.3: None None 
None
[2025-07-24 16:56:46][I][pytorch/trainer:2006:l2hmc.trainers.pytorch.trainer] --------------------------------------------------------------------------------
[2025-07-24 16:56:46][I][pytorch/trainer:2007:l2hmc.trainers.pytorch.trainer] WEIGHTS:
[2025-07-24 16:56:46][I][l2hmc/common:97] networks.xnet.input_layer.xlayer.weight: torch.Size([256, 18432]) torch.float64 
[[ 1.64547981e-03 -1.73092676e-03  5.15433585e-03 ... -5.57388506e-03
   1.79181745e-03 -7.80642884e-04]
 [-2.38493715e-03 -5.21933797e-03 -9.48945459e-04 ...  6.03037599e-03
  -6.42301817e-03  1.73644111e-03]
 [-3.21127136e-03 -8.48576512e-04 -4.52617410e-03 ...  4.98780918e-03
   7.01177001e-03 -4.81267717e-03]
 ...
 [-5.52444734e-03 -7.17098009e-03 -5.82981809e-04 ... -4.19563720e-03
   9.65496855e-04  5.66745974e-03]
 [-5.94729363e-03 -4.63549398e-03 -6.13413889e-03 ... -5.95934312e-03
   1.10359239e-03  1.75542112e-03]
 [ 4.96705545e-03 -3.31718164e-03  4.97570533e-03 ... -4.65003221e-03
  -1.04575414e-05 -2.96681417e-03]]
networks.xnet.input_layer.xlayer.bias: torch.Size([256]) torch.float64 
[ 4.67266361e-03 -6.29078543e-03  7.13969329e-03  7.15200544e-03
 -4.39839094e-03  3.85769522e-03 -3.65512670e-03  7.15064546e-03
  1.72484311e-03 -1.79193265e-04 -5.27257584e-03  7.24440382e-03
 -4.90817144e-03 -2.49353161e-03 -6.70006585e-03 -4.44639784e-03
 -2.27470572e-03 -3.53007734e-03 -3.11426292e-03 -1.12755591e-03
  5.03942198e-03 -2.44235142e-03  6.55212538e-03  6.32980858e-03
 -1.57210894e-03  7.08440005e-03  2.30583092e-03 -9.11111505e-04
 -3.22715566e-03  3.75586839e-03 -7.05071649e-03 -6.62089408e-03
  6.69931030e-03 -4.14915232e-03 -5.08321092e-03 -5.44592283e-03
  3.63929943e-03  4.75151922e-03 -3.59719235e-03  1.66565673e-04
 -3.99413236e-03  2.01957963e-03  6.72871798e-03 -4.06463398e-03
 -2.71655089e-03 -6.95745871e-04  2.92878125e-03  5.91689712e-03
 -3.92982612e-03 -4.92802371e-03  5.73511698e-03  1.36843349e-03
 -3.66029021e-03 -2.26523206e-03  5.37545122e-03  4.42669052e-04
 -3.45517144e-03 -6.10719835e-03 -5.18331726e-03  4.85859231e-03
 -5.39951547e-03 -5.33025551e-03 -3.27854404e-03  3.15829437e-03
 -4.21222535e-05 -2.45063652e-03  1.05727844e-03 -5.96038311e-03
 -3.49812717e-03  6.41953829e-03 -3.56705829e-03  2.45192601e-03
  3.05302765e-03  6.23796366e-03 -6.37413504e-03  6.31104026e-03
 -3.13100280e-03  1.79336803e-03 -5.75271538e-03 -1.74504563e-04
  3.65905448e-03 -6.36414312e-03 -4.40801688e-03 -1.66373553e-03
  4.29374908e-03 -6.08242225e-04 -7.56757467e-04  3.00201835e-06
 -5.31756164e-03 -6.16205809e-03 -4.67539506e-03 -5.90001989e-03
 -7.21750846e-03 -1.44104458e-03 -1.18837843e-03  5.11260734e-03
  3.92358705e-03 -5.88184375e-03 -6.36190970e-03  1.86261226e-03
  6.13193522e-04  3.18348206e-03 -2.20368600e-03 -5.86343454e-03
  4.10340706e-03 -4.65208466e-03 -2.96552405e-04 -3.23823289e-03
  3.09562331e-03  1.03461025e-03  7.22075916e-03 -7.15884864e-03
 -2.95532158e-03 -6.59380547e-03 -2.78810580e-03 -1.96579744e-03
 -3.57623037e-03 -1.02130686e-03  4.34831096e-03  6.27582161e-03
  5.21463273e-03  3.78037436e-03 -7.32540172e-03  4.78089072e-03
 -4.35551108e-03  3.39163411e-03 -3.87971495e-03  2.68035168e-03
  7.22271850e-03  1.43213463e-03 -2.09173899e-03 -6.81048510e-03
 -6.21299799e-03 -6.62533700e-03 -3.96995993e-03 -1.00821958e-03
 -7.03195393e-03  2.13243126e-03 -5.17623214e-03  1.37617710e-03
 -7.02375559e-03  3.87611656e-03  3.99896888e-03 -7.69986664e-04
 -2.80949430e-03 -4.78136982e-03 -5.23030687e-03 -5.12970155e-03
 -5.55213540e-03  3.59358778e-03  4.95613540e-03 -3.63036946e-03
 -1.30692977e-03 -6.32969187e-03 -4.27583252e-03  1.11269444e-03
 -5.81169183e-04  3.54849039e-03 -4.52910362e-03  5.43749155e-03
 -7.10151048e-03 -6.08545183e-03  3.46675529e-03  3.62010120e-03
 -4.09766390e-03 -1.18111611e-03  9.33002149e-04 -1.36231401e-03
 -3.67528577e-04  5.20838902e-03  3.52908767e-03  1.78270288e-03
  6.78167981e-03 -3.62968737e-03  6.44234000e-03 -1.86662683e-03
  3.70867283e-03  4.45013108e-03  5.99013596e-03  2.56483315e-03
  5.98166221e-03  5.09639864e-03 -2.91241240e-03 -7.00327979e-03
  4.80519285e-03 -4.62889331e-03  4.93732122e-04 -5.29679417e-03
  6.75604332e-05 -4.95914764e-03  6.07677967e-03 -7.09625886e-03
  2.59875788e-04 -1.60419537e-03 -1.44002693e-03 -4.23883675e-04
 -6.10140685e-03  1.99117098e-03 -6.80601982e-03 -7.11092664e-03
 -6.65774643e-03  6.06424507e-06 -3.47618939e-03  4.62510152e-03
 -6.69897993e-05 -9.45332275e-04 -4.03898889e-03  2.59456892e-03
  3.88170502e-03  8.63962494e-04 -1.11099488e-03 -1.84002763e-03
  2.19182410e-03 -6.69163827e-03  4.01671692e-03 -4.34242786e-03
  1.04367480e-03  5.16365452e-03  7.14892919e-03  2.59003338e-05
 -6.82793729e-03  4.51248230e-03  5.77277447e-03  3.51056978e-03
  4.32859336e-03 -2.49617228e-03 -1.48712913e-03  4.13463617e-03
  2.23283085e-03 -1.99101732e-03 -1.61429030e-03 -5.22780938e-03
 -6.28940477e-03  2.05563813e-03 -9.72701640e-04 -6.71854575e-03
 -4.14323561e-03 -5.54527982e-03  5.06180138e-04 -4.37237043e-03
 -5.98056859e-03 -5.04322466e-03 -4.15729048e-04  4.42789941e-03
  6.62189379e-03  4.23684577e-04  4.60035247e-03 -2.43330845e-03
  2.11279956e-03 -2.86604882e-03 -4.51252228e-03 -6.73178814e-04
 -4.46313084e-03 -6.30943541e-04 -3.51831998e-03 -7.19210737e-03]
networks.xnet.input_layer.vlayer.weight: torch.Size([256, 18432]) torch.float64 
[[-0.00254604 -0.00278928 -0.00291235 ... -0.00578555 -0.00434722
  -0.00614031]
 [ 0.00388633  0.00215724  0.00320443 ...  0.00234512  0.00470126
   0.00085752]
 [ 0.00147137  0.00394903 -0.00540817 ...  0.00174974  0.00033785
  -0.00144348]
 ...
 [-0.00297102 -0.00559173  0.00140852 ...  0.00730386 -0.00175809
   0.00563609]
 [ 0.0041033  -0.0043227  -0.00428448 ... -0.00392522 -0.00388147
   0.00573322]
 [-0.00639863  0.0055535   0.00543229 ...  0.00726398 -0.00658535
  -0.00667838]]
networks.xnet.input_layer.vlayer.bias: torch.Size([256]) torch.float64 
[-3.91276703e-03 -1.17598550e-03  4.96050785e-03  1.63338292e-03
 -2.80893539e-03 -2.35955478e-03  3.13016621e-03  4.13400383e-03
  7.03288649e-03  9.06435742e-04 -4.27261825e-03 -3.38483995e-03
  6.17717158e-03  1.97265781e-03 -5.67568478e-03  4.87516238e-03
 -4.85221385e-03 -5.80183076e-03 -3.77911628e-03 -3.60652048e-04
  7.07772212e-03 -5.67415157e-03  6.86336845e-05  4.64003993e-03
 -2.83343798e-04 -2.61194650e-03 -3.57995591e-04  6.14546049e-03
 -4.13085215e-03  3.47346099e-03  5.05824724e-03 -5.62729864e-03
  4.02721311e-03  1.21761440e-03  3.99339480e-03 -1.50364788e-03
 -7.16502175e-03 -1.14459729e-03  5.26048556e-03  3.44099144e-03
 -5.10686629e-03 -3.40816931e-03  1.64131805e-03  1.57127976e-03
  2.24686260e-03 -3.33687293e-03 -3.90434433e-06  3.42513958e-03
 -4.60240771e-03 -4.68162851e-03  2.52454694e-04 -4.23801025e-03
  3.95800563e-03 -3.10224804e-04  6.86744539e-03 -2.47747808e-03
 -7.09201363e-04 -6.05789905e-03 -4.40629310e-03 -3.70083215e-03
  5.57406493e-03  6.34149511e-03 -8.84931522e-04  1.71409025e-03
  5.31975556e-04  7.25651801e-03 -5.69614958e-03  2.62588039e-03
 -6.49584564e-03  6.71976970e-03 -2.43175749e-03  4.15712134e-03
  6.58838876e-03 -2.61406415e-03  6.95914613e-03  6.34183429e-03
  5.33172299e-04  6.55493401e-03  3.40645693e-04 -3.44132273e-03
 -4.04667420e-03 -5.28898740e-03  1.11883524e-03 -3.52076624e-05
  5.02886808e-03 -1.35616392e-03  7.13373620e-03 -6.64791785e-03
 -6.90459506e-03  4.44830850e-03 -3.15165394e-03 -1.83602217e-03
 -3.13041789e-03  7.08766790e-03  7.24629087e-03 -2.73206074e-04
  1.58837385e-03  4.95337041e-03  4.12064635e-03 -3.96137072e-03
  3.41468375e-03  1.08906541e-03 -3.04098889e-03 -5.31406113e-03
 -3.06989963e-03 -1.58336092e-05 -2.25468434e-04  5.42413852e-03
  1.82822515e-03 -2.52263916e-03 -2.14287429e-03  5.98856726e-03
 -3.20931976e-03  3.40446512e-03  9.94737033e-04 -1.26072325e-03
 -3.30884886e-03  3.41089945e-04 -2.99010415e-03 -6.13427139e-03
 -3.49097477e-03 -1.40227832e-03  2.25610543e-03  5.05488475e-03
  2.80261002e-03 -4.50405840e-03  2.85804142e-03 -4.74460397e-03
 -5.06737244e-03 -4.12293476e-03 -6.48276208e-03 -4.41462361e-03
  4.89863792e-03 -5.80149475e-03  1.05126787e-03 -2.40830269e-03
 -5.81096071e-03  7.02962989e-03 -4.28883891e-04  2.10644783e-03
 -1.77205767e-04  2.10706536e-04  6.87341702e-03  3.91376011e-04
  2.00109902e-03 -3.08409920e-03 -1.35995385e-03 -2.94802030e-03
 -1.76460065e-03  3.13984946e-03 -8.99725564e-04  5.66444448e-03
 -1.29944765e-04 -5.01570201e-03  2.84922173e-03 -3.49077907e-03
 -2.92317830e-03  2.74442251e-03 -1.73426839e-03  5.09839164e-03
  1.00217434e-03  1.66646546e-03  7.30890049e-03 -3.72079573e-03
 -5.08690093e-03 -6.40174608e-03  3.16661321e-03 -1.02905765e-03
 -4.58417322e-03  1.90226332e-03 -2.10686406e-03  3.89679615e-03
 -1.91049276e-04  7.17973853e-03 -4.58551418e-03 -1.13172583e-03
 -2.87052769e-03  5.92010039e-03 -4.54367903e-03  6.17724626e-03
  4.73912225e-03 -3.51409638e-03 -4.18133814e-03  5.22887242e-03
  6.43745388e-03  1.46565261e-03  1.22308830e-03 -6.61691915e-03
 -2.73735311e-03 -3.36730106e-03 -5.62589267e-03  2.05333057e-03
  4.82290625e-03  3.81187665e-03  4.59914560e-03  5.90815698e-03
 -2.54091590e-03 -1.25006096e-03  4.20316306e-03  5.82640799e-03
 -3.71720648e-03  5.84638626e-03  5.33624274e-03 -4.96135849e-03
  1.54394603e-04  2.88796212e-03 -6.00267524e-05  6.76462975e-03
 -4.82465107e-04  7.29041386e-03  5.25660522e-03 -3.63138797e-03
  6.80430993e-04  2.51235438e-03 -3.99580540e-03 -3.64944868e-04
 -3.98263990e-03 -6.80885308e-04  5.11840864e-03 -3.95947483e-03
  3.27626388e-03  8.49196075e-04 -4.33028622e-03  6.34849646e-04
  6.74858675e-03  5.13442885e-03 -2.30689101e-03 -3.52799405e-05
 -2.66032106e-03  6.14158455e-03 -6.71421925e-03  3.70372185e-03
 -8.80092985e-04 -4.69004557e-03 -2.60871530e-03  6.30938607e-03
 -7.20900519e-03 -2.02787335e-03 -2.76136027e-03  5.29217356e-03
  2.69742528e-03 -4.61752582e-03  4.85430084e-03 -3.27192450e-03
 -4.55044111e-03  3.50388685e-03 -5.81673386e-03 -7.07770399e-03
  3.95305393e-03  2.14292256e-05  4.47597649e-03 -7.15197567e-04
 -1.24280408e-03 -7.12359491e-03  8.66424707e-04 -6.34940171e-03]
networks.xnet.scale.coeff: torch.Size([1, 9216]) torch.float64 
[[0. 0. 0. ... 0. 0. 0.]]
networks.xnet.scale.layer.weight: torch.Size([9216, 256]) torch.float64 
[[-0.0594191   0.01023839 -0.01642843 ...  0.02865324 -0.00835422
   0.01432848]
 [-0.00367336  0.04322275 -0.01313253 ... -0.00758469  0.04962381
  -0.02137671]
 [-0.05213886  0.05805194 -0.05870177 ... -0.03555754 -0.03608946
   0.03223866]
 ...
 [ 0.00978376  0.03277219  0.03125306 ... -0.01213493  0.02497264
   0.02712005]
 [-0.04797464 -0.00898904 -0.0555607  ... -0.00384776  0.05544567
   0.05605885]
 [-0.04344975  0.01386949 -0.02800783 ... -0.04308637 -0.00690665
   0.05309356]]
networks.xnet.scale.layer.bias: torch.Size([9216]) torch.float64 
[ 0.04638944  0.00601816 -0.03689309 ... -0.00662351  0.02382634
  0.03719499]
networks.xnet.transf.coeff: torch.Size([1, 9216]) torch.float64 
[[0. 0. 0. ... 0. 0. 0.]]
networks.xnet.transf.layer.weight: torch.Size([9216, 256]) torch.float64 
[[ 0.01539758  0.04767374 -0.01417824 ... -0.04416167 -0.0235656
  -0.05860819]
 [-0.04470275  0.01469557 -0.05933459 ...  0.0256917  -0.04784798
   0.00878948]
 [ 0.05516211  0.0558832   0.02651854 ... -0.03526568  0.01714258
  -0.00812102]
 ...
 [-0.03800518  0.01588555 -0.02908312 ...  0.01620687  0.01743512
  -0.05140136]
 [-0.03599462  0.01158062 -0.0355959  ... -0.06143294  0.05641674
   0.02155656]
 [ 0.04564517 -0.02036414 -0.02284707 ...  0.0486985  -0.0500677
   0.01800617]]
networks.xnet.transf.layer.bias: torch.Size([9216]) torch.float64 
[ 0.02214304 -0.0248871  -0.04064743 ...  0.04895277 -0.0540264
 -0.04864673]
networks.xnet.transl.weight: torch.Size([9216, 256]) torch.float64 
[[-0.06082837  0.03174185 -0.02388697 ... -0.00573172 -0.04801249
   0.05144302]
 [-0.03308586  0.02617982 -0.01357712 ... -0.019656    0.01857523
   0.0491785 ]
 [ 0.05427809  0.03436538 -0.00246441 ...  0.0562963  -0.03219095
  -0.0106154 ]
 ...
 [ 0.04850355 -0.02226856 -0.02468239 ...  0.02588419 -0.01297717
   0.02921589]
 [-0.04976576 -0.01415489  0.05224991 ... -0.01780911  0.0582653
  -0.02075991]
 [-0.03997122 -0.03527536 -0.02285578 ... -0.00559565 -0.03951269
   0.00532229]]
networks.xnet.transl.bias: torch.Size([9216]) torch.float64 
[ 0.03607683  0.00784024  0.06195948 ...  0.00617823 -0.04639402
  0.01582899]
networks.vnet.input_layer.xlayer.weight: torch.Size([256, 8192]) torch.float64 
[[ 2.44633236e-03  9.58688121e-03  1.74121561e-03 ... -5.20722230e-03
   2.27899234e-03  1.29554802e-03]
 [-4.40934643e-03 -8.87187218e-03  7.65910766e-03 ...  8.85029445e-04
   6.35505056e-03 -3.04758362e-05]
 [ 5.20253041e-03  5.68573397e-03 -4.68286435e-03 ... -9.74898609e-04
   7.39016692e-03 -8.93215131e-03]
 ...
 [ 9.79812382e-03  9.12002604e-04  5.49392316e-03 ...  7.99143520e-03
  -6.23000041e-03  2.49364061e-03]
 [-2.78947519e-03 -5.80557939e-03  7.62215532e-03 ... -8.38436327e-03
   4.42829608e-03 -5.18048910e-03]
 [-1.01273220e-02 -1.08776329e-02  8.61698386e-03 ...  7.79826596e-04
  -6.63867827e-03 -4.85730625e-04]]
networks.vnet.input_layer.xlayer.bias: torch.Size([256]) torch.float64 
[-3.25579656e-03 -1.01013963e-02 -6.03674943e-03 -4.69601192e-03
  1.49679478e-03  3.24643683e-03 -2.55313000e-03 -1.30133155e-03
 -6.00647292e-03  1.50289400e-03 -1.09643890e-02 -7.92536325e-03
 -4.05708043e-03 -6.25246438e-03 -6.93915551e-03  3.72249793e-03
 -2.88818004e-03  2.94942961e-03  6.36964058e-03 -5.41864332e-03
  1.38816459e-03  9.65118782e-03 -4.41971884e-03 -1.04163615e-02
  5.33374964e-04 -8.74732680e-03  1.48442523e-03 -6.91259622e-03
 -1.48266292e-03 -2.33182238e-03  9.69206033e-04  5.38213551e-03
 -4.74636104e-04 -3.08369019e-03 -7.35652583e-03  4.49914528e-03
  1.00216132e-02 -5.72484129e-03  4.10610066e-04 -9.94447235e-03
  5.05749665e-04  3.16887848e-03  1.07353308e-02 -8.13347204e-03
 -7.04384928e-03 -3.17367437e-03 -8.23377061e-03 -9.68759775e-03
  7.56921743e-03  7.79222591e-03 -8.87051663e-03  1.05318084e-02
  3.05057780e-03 -8.12454268e-03 -6.41310295e-04  9.77658111e-04
 -9.27858872e-03  7.59273498e-03 -4.12161127e-03 -3.58097550e-03
 -7.88149874e-03 -8.39233693e-03 -9.80717122e-03 -8.61895567e-03
  8.14498160e-03  9.90781092e-03 -1.05873799e-02 -8.68962130e-03
  2.94368590e-03 -5.34239359e-03  6.98230353e-03  5.70314272e-03
  8.01595178e-03 -3.85751152e-03  2.97688223e-03 -2.43718649e-03
 -7.95458449e-04  5.92916456e-03 -5.44484186e-03 -6.79100965e-03
 -5.11761110e-03  5.39756881e-03  4.29328360e-03 -8.33582338e-03
  5.24330099e-03  5.10051024e-03  8.87856809e-04 -2.51313876e-03
  9.99520686e-03 -6.18765387e-03  1.08235269e-02  5.42349092e-03
 -1.03917399e-02 -8.50303613e-03  1.51455622e-03  2.76043264e-03
  5.10198234e-03 -8.19117994e-03  6.18909906e-03 -6.71834269e-03
  7.56365732e-03  2.64001403e-03 -5.27788009e-03  2.30538382e-03
 -6.40889398e-03  9.02337593e-04 -2.42065327e-04  6.13697490e-03
  6.43672755e-04 -5.78307371e-03  2.36774682e-03 -1.03781893e-03
  8.64609280e-03  8.71027171e-03  2.39222166e-03  2.54229878e-03
 -1.39614795e-03 -3.65060018e-03 -3.16434267e-03 -5.01712355e-03
  6.13169033e-03  6.16626841e-03  9.01711855e-03 -1.07415678e-02
 -9.23578151e-03  4.97852285e-03 -2.90890279e-03 -4.74747768e-04
  1.77761227e-03 -3.62291632e-03 -7.54772343e-03 -4.03855207e-03
 -1.80264951e-03  8.41767908e-03 -2.93249234e-03 -7.53410742e-03
  2.93477950e-03  9.58248807e-03 -9.02567801e-03 -3.98274923e-03
  2.08961437e-03 -7.65562061e-03 -8.85128992e-03 -7.98301857e-03
  6.38895264e-03 -5.47432656e-03 -3.26618716e-03 -9.82707592e-03
 -1.09100579e-02 -1.58503128e-03  1.66976975e-03  4.43511164e-03
  4.78537023e-03  1.00070326e-02  5.26785391e-03  1.05330059e-02
 -4.59809226e-03 -3.68778281e-03 -6.18570485e-04  7.58081850e-04
 -3.82507784e-03 -5.08582371e-03 -1.54613344e-03  9.86957518e-04
 -1.53813016e-03  2.59956364e-03  8.30273450e-03  4.37101531e-03
 -5.27340671e-03 -9.52544619e-03 -2.68288431e-03 -9.07922997e-03
 -1.08143741e-03 -1.18887595e-03 -1.05603190e-02 -8.73038347e-03
  3.52163844e-03 -1.06530734e-02 -1.75421903e-03  3.86421261e-03
  7.16418188e-03 -7.47025370e-03 -5.51606926e-03  8.84861613e-03
  8.96144199e-03  1.08637682e-02  6.24003453e-03 -8.67199830e-03
 -4.06011222e-03  7.45170547e-03 -3.29378793e-04 -7.47554106e-03
  4.77099871e-03  7.60871096e-03  2.30678439e-04 -3.82906719e-03
 -1.62723413e-03 -2.44894389e-03  1.03797716e-03  9.33206540e-03
 -4.50748145e-03 -2.02642479e-03  3.50302204e-03 -2.10228957e-04
  1.31278709e-04  4.63739307e-03  1.09217305e-02  2.74385286e-03
  7.20997114e-03 -5.38801749e-03  3.76675231e-03  7.19155572e-03
 -8.40171410e-03  3.30603256e-03  8.00469837e-03  2.93443785e-04
  5.05477880e-03  3.72745080e-03  9.92075250e-03  7.77037365e-04
 -4.28402847e-03  1.90875501e-03 -1.05542827e-02  7.20134169e-03
 -2.44935715e-03  9.85689207e-04 -9.53618931e-04  9.50163985e-03
 -3.26690531e-03  6.42550476e-03  7.43392400e-03 -5.39948207e-03
 -6.80515649e-03 -4.44056206e-03  1.12784920e-04 -7.64954899e-03
 -9.30084590e-03  5.59277216e-03 -9.16449178e-03  2.33288452e-04
  9.52864564e-03 -4.67138955e-03 -4.45628946e-03 -4.10988267e-04
  1.00880828e-02 -6.97775527e-03 -5.49280249e-03  2.80102610e-03
  2.11126830e-03 -2.57421212e-04 -9.12323137e-04 -8.38444651e-03
 -8.15755444e-03  5.86975398e-03  3.98098606e-03 -5.53728031e-06]
networks.vnet.input_layer.vlayer.weight: torch.Size([256, 8192]) torch.float64 
[[-0.00748024  0.00459881  0.00465722 ...  0.00888237 -0.00675414
   0.00664083]
 [ 0.00842813 -0.00719914 -0.00579696 ...  0.00789821 -0.00383181
   0.00093512]
 [ 0.00539934 -0.01025359  0.00292647 ...  0.00446687 -0.01001312
  -0.00596549]
 ...
 [-0.00497839  0.01026974 -0.00043234 ...  0.00266668 -0.01042327
  -0.00817117]
 [-0.00320871  0.00407346 -0.00135596 ... -0.00893072  0.00634529
  -0.01011484]
 [-0.00984863 -0.00805651 -0.00736318 ...  0.00446534  0.00961841
  -0.01011041]]
networks.vnet.input_layer.vlayer.bias: torch.Size([256]) torch.float64 
[-2.83035008e-03  7.27180673e-03 -4.67520117e-03  4.01517466e-03
  1.04616054e-02  1.04744287e-03 -4.11220503e-03 -8.71348105e-03
  4.22444607e-03  7.18406711e-04 -1.08218953e-02 -6.05564940e-03
  6.45192467e-03 -7.41992809e-03  1.06638191e-02 -4.39410207e-03
  3.35406625e-03  4.38425005e-03 -7.01792514e-03 -2.82010539e-03
 -4.80825017e-03  2.14120790e-03  1.06294537e-02 -3.84590818e-03
 -6.08153714e-03 -3.55456702e-03  4.50568201e-03 -9.70647853e-03
  7.34061303e-03 -4.31281379e-04  8.98371160e-03  1.30293360e-03
 -7.74473103e-03  1.08890793e-02 -1.09339929e-02  5.43899475e-03
  9.34509750e-03  6.81876900e-03  3.29439916e-03 -1.10180356e-02
 -8.62024876e-03  8.44778358e-03  5.43890248e-03 -4.13199405e-03
  4.26198725e-03 -7.30083066e-03 -1.31427994e-03  7.37868093e-03
 -6.90592754e-03 -7.68913136e-04 -1.06742339e-02  7.64045406e-03
 -8.64960697e-03 -6.15111119e-03 -6.40303475e-03 -1.05044446e-02
  3.94900973e-03  7.08668650e-03 -5.98975922e-03 -1.09879154e-02
 -6.96458134e-03 -2.57949173e-03  7.43389565e-03  1.15633513e-03
 -1.42366975e-03  4.88657247e-03  6.75884303e-03  7.02818306e-03
  2.19508882e-03  1.24324333e-03  6.37759924e-03 -6.50744902e-03
  7.18210083e-03 -1.74527977e-03 -1.10854633e-03  9.00511491e-03
 -9.89572490e-03 -5.66966553e-03  6.62781365e-03 -1.09731930e-02
  6.70019652e-03  9.52561217e-03 -7.29929846e-03  7.47925961e-03
 -9.69857190e-03 -4.64457713e-03  4.31963524e-03 -1.49014150e-03
  1.02679711e-02 -6.90876114e-03 -9.47520430e-03  1.08142141e-02
 -2.83619335e-03 -5.66733096e-03  1.12410472e-03 -7.96596720e-03
  5.90620480e-04 -4.53357322e-03 -8.76034043e-03 -7.41497245e-03
 -8.60527230e-03  1.60752727e-03  7.76441185e-03 -2.49488532e-03
 -7.96992721e-03 -1.01370583e-02  5.66996818e-03  1.26212957e-03
 -5.66994086e-03 -1.02111606e-02 -1.01443787e-02  8.71572721e-03
 -9.08144222e-03  2.64842779e-03 -8.93110247e-03 -2.84173681e-03
 -5.60146139e-03 -2.87049710e-04 -3.91232184e-05 -2.19023019e-03
  2.88706811e-03  1.02766115e-02 -5.91072253e-03 -5.83912458e-04
 -8.85492761e-03 -1.48151990e-03 -1.48288353e-03 -1.36702415e-03
  7.71104383e-03 -6.59143896e-03  5.43993639e-04 -4.29419169e-03
 -4.94590081e-03 -5.12144621e-03  6.75861400e-03 -6.76583466e-03
  1.64464531e-03  3.08153047e-03 -3.99253949e-03  7.46300482e-03
  9.01838907e-03  7.94927746e-04  1.22143212e-03 -5.56039918e-03
 -8.12493071e-03 -4.03781117e-03  4.76355997e-03  1.38823610e-03
 -6.30479683e-03 -9.85798690e-04 -4.64272400e-03  1.96873968e-04
  8.41763859e-03 -1.04137293e-02 -4.47215641e-04 -3.42223992e-03
 -3.59981353e-03  1.14285433e-03 -1.67283373e-03  1.09016260e-02
  4.30742094e-03  3.57469437e-03  3.52695142e-03 -9.16117421e-03
 -4.01556697e-03 -9.43906988e-03  3.95682474e-04  6.36553087e-03
  4.57441995e-03  6.21668185e-03  7.15869012e-03  1.11194790e-03
  9.49773109e-03  4.17087770e-03 -2.26865729e-03 -7.22704632e-03
  4.33022652e-03 -3.60124551e-03  7.46236119e-03 -3.12264620e-03
 -4.74694897e-03 -5.15712468e-03 -2.52250937e-03 -1.02556194e-02
 -7.91363480e-04 -1.04448828e-02 -4.16722654e-03  3.18288036e-03
 -7.58363644e-03  5.47179739e-03  4.86657808e-03  9.79391505e-03
 -1.09174765e-02 -2.85669617e-03  9.34462603e-03 -8.40433233e-03
  1.86970783e-03  8.03093533e-03  3.02526815e-03  9.92889968e-03
  8.76226217e-03  5.67837366e-03  7.59101092e-03  1.00592417e-02
  6.02836634e-03  8.84388242e-03  2.68773361e-03  7.60402856e-03
 -4.86442634e-03 -8.61582498e-04 -8.41937640e-03 -9.56783467e-03
  1.07182707e-02 -5.80120998e-03  9.08112292e-04 -3.70876420e-03
 -9.99614490e-03  9.12579315e-03  9.14717181e-03 -9.62331813e-03
 -5.56478306e-03  1.08718264e-02  1.15427082e-03  8.93998649e-03
  4.79593376e-03  1.27600523e-03 -1.34384913e-03 -6.10298135e-03
  6.28123197e-04 -4.57527049e-03 -5.64034891e-04 -5.74636729e-03
 -2.92195070e-03  9.91126879e-03  7.09978978e-03 -4.98347869e-03
 -2.11628028e-04 -8.74606525e-03  8.88190461e-03  4.59493143e-03
  5.63472135e-04  5.93154851e-03 -7.86215514e-03 -1.03262729e-02
 -8.19697030e-03  1.00308030e-02 -6.27189874e-03 -7.94739105e-03
 -6.50357814e-03 -6.10228917e-03  5.98859251e-03 -1.83853014e-03
  3.88985151e-03  1.05646983e-02  1.06540319e-02 -6.55636098e-03]
networks.vnet.scale.coeff: torch.Size([1, 9216]) torch.float64 
[[0. 0. 0. ... 0. 0. 0.]]
networks.vnet.scale.layer.weight: torch.Size([9216, 256]) torch.float64 
[[-0.0211792   0.01360612 -0.01895423 ...  0.05311411  0.0303243
   0.01934684]
 [ 0.05392502 -0.05292606  0.02971697 ...  0.04740279 -0.01602265
   0.03325339]
 [ 0.02150624  0.00540851  0.04215835 ... -0.0116221   0.03956344
  -0.03162053]
 ...
 [ 0.01436092 -0.00757652 -0.00925584 ... -0.01513149  0.0260258
   0.00904203]
 [ 0.00691328  0.01730746 -0.02075534 ...  0.04103487 -0.04194295
  -0.04294283]
 [-0.00413409 -0.02861339 -0.01642515 ...  0.01660827 -0.01023394
  -0.05735686]]
networks.vnet.scale.layer.bias: torch.Size([9216]) torch.float64 
[-0.04615599  0.0218543   0.05632434 ...  0.05646791  0.03091398
  0.02782508]
networks.vnet.transf.coeff: torch.Size([1, 9216]) torch.float64 
[[0. 0. 0. ... 0. 0. 0.]]
networks.vnet.transf.layer.weight: torch.Size([9216, 256]) torch.float64 
[[-0.03027565 -0.05439916  0.04951974 ...  0.06178862  0.00190744
   0.01774788]
 [-0.01594073 -0.02391274 -0.02079861 ... -0.01737575  0.04941283
   0.02506094]
 [-0.02078581  0.00512239  0.03599004 ...  0.06186521  0.02556314
  -0.06008499]
 ...
 [ 0.0022767   0.05334353 -0.05320856 ... -0.01550338  0.05970897
  -0.00374976]
 [ 0.02945715  0.02967477  0.05977853 ... -0.02381523  0.02225031
  -0.04655354]
 [-0.00959657 -0.01332176  0.05359353 ... -0.03942653 -0.01370504
   0.05291501]]
networks.vnet.transf.layer.bias: torch.Size([9216]) torch.float64 
[ 4.08399257e-02 -3.76670171e-02  8.43116791e-03 ...  3.90931648e-02
 -8.44490760e-03 -9.29754024e-05]
networks.vnet.transl.weight: torch.Size([9216, 256]) torch.float64 
[[ 0.00433255 -0.02497033  0.05023563 ...  0.04761612 -0.03073068
  -0.03430707]
 [ 0.05609064 -0.01828104  0.01056407 ...  0.00559933 -0.01237723
  -0.03514199]
 [-0.04184505 -0.02188827 -0.00452426 ... -0.02832249  0.05317846
  -0.06208485]
 ...
 [-0.00265456  0.03002125 -0.03231925 ... -0.05301782  0.00681614
  -0.00917102]
 [ 0.05866198  0.04632618 -0.00759648 ... -0.00639913  0.00027154
   0.04514867]
 [ 0.02405452  0.02119173 -0.05547384 ... -0.02657769  0.01623083
   0.03446882]]
networks.vnet.transl.bias: torch.Size([9216]) torch.float64 
[-0.04169058  0.02620383 -0.0442881  ... -0.01138426  0.04772879
  0.01129767]
xeps.0: torch.Size([]) torch.float64 
0.01
xeps.1: torch.Size([]) torch.float64 
0.01
xeps.2: torch.Size([]) torch.float64 
0.01
xeps.3: torch.Size([]) torch.float64 
0.01
veps.0: torch.Size([]) torch.float64 
0.01
veps.1: torch.Size([]) torch.float64 
0.01
veps.2: torch.Size([]) torch.float64 
0.01
veps.3: torch.Size([]) torch.float64 
0.01
[2025-07-24 16:56:46][I][pytorch/trainer:2009:l2hmc.trainers.pytorch.trainer] --------------------------------------------------------------------------------
[2025-07-24 16:56:46][I][ipykernel_8749/3178487732:2:ezpz.log] ExperimentConfig(wandb={'setup': {'id': None, 'group': None, 'config': None, 'save_code': True, 'sync_tensorboard': True, 'mode': 'online', 'resume': 'allow', 'entity': 'l2hmc-qcd', 'project': 'l2hmc-qcd', 'settings': {'start_method': 'thread'}, 'tags': ['beta_init=6.0', 'beta_final=6.0']}}, steps=Steps(nera=1, nepoch=10, test=50, log=1, print=1, extend_last_era=1), framework='pytorch', loss=LossConfig(use_mixed_loss=True, charge_weight=0.0, rmse_weight=0.1, plaq_weight=0.1, aux_weight=0.0), network=NetworkConfig(units=[256], activation_fn='tanh', dropout_prob=0.0, use_batch_norm=False), conv=ConvolutionConfig(filters=[], sizes=[], pool=[]), net_weights=NetWeights(x=NetWeight(s=0.0, t=1.0, q=1.0), v=NetWeight(s=1.0, t=1.0, q=1.0)), dynamics=DynamicsConfig(nchains=8, group='SU3', latvolume=[4, 4, 4, 4], nleapfrog=4, eps=0.01, eps_hmc=0.25, use_ncp=True, verbose=True, eps_fixed=False, use_split_xnets=False, use_separate_networks=False, merge_directions=True), learning_rate=LearningRateConfig(lr_init=0.0001, mode='auto', monitor='loss', patience=5, cooldown=0, warmup=1000, verbose=True, min_lr=1e-06, factor=0.98, min_delta=0.0001, clip_norm=1.0), annealing_schedule=AnnealingSchedule(beta_init=6.0, beta_final=6.0, dynamic=False), gradient_accumulation_steps=1, restore=False, save=False, c1=0.0, port='2345', compile=True, profile=False, init_aim=False, init_wandb=False, use_wandb=False, use_tb=False, debug_mode=False, default_mode=True, print_config=True, precision='float32', ignore_warnings=True, backend='DDP', seed=9992, ds_config_path='/Users/samforeman/projects/saforem2/l2hmc-qcd/src/l2hmc/conf/ds_config.yaml', name=None, width=200, nchains=None, compression=False)

HMC

xhmc, history_hmc = evaluate(
    nsteps=50,
    exp=ptExpSU3,
    beta=6.0,
    x=state.x,
    eps=0.1,
    nleapfrog=8,
    job_type='hmc',
    nlog=1,
    nprint=50,
    grab=True
)
[2025-07-24 16:56:46][I][pytorch/experiment:117:l2hmc.experiment.pytorch.experiment] Running 50 steps of hmc at beta=6.0000
[2025-07-24 16:56:46][I][pytorch/experiment:121:l2hmc.experiment.pytorch.experiment] STEP: 0
[2025-07-24 16:56:46][I][pytorch/experiment:121:l2hmc.experiment.pytorch.experiment] STEP: 1
... (STEP: 2 through STEP: 48 elided; ~0.3 s / step) ...
[2025-07-24 16:57:00][I][pytorch/experiment:121:l2hmc.experiment.pytorch.experiment] STEP: 49
dataset_hmc = history_hmc.get_dataset()
_ = history_hmc.plot_all(title='HMC')

xhmc = ptExpSU3.trainer.dynamics.unflatten(xhmc)
print(f"checkSU(x_eval): {g.checkSU(xhmc)}")
print(f"checkSU(x_eval): {g.checkSU(g.projectSU(xhmc))}")
checkSU(x_eval): (tensor[8] f64 x∈[2.220e-16, 5.489e-16] μ=4.132e-16 σ=1.407e-16 [3.037e-16, 5.489e-16, 3.201e-16, 5.396e-16, 2.916e-16, 5.397e-16, 2.220e-16, 5.402e-16], tensor[8] f64 x∈[8.040e-16, 1.757e-15] μ=1.264e-15 σ=4.239e-16 [9.343e-16, 1.757e-15, 9.437e-16, 1.529e-15, 8.222e-16, 1.734e-15, 8.040e-16, 1.589e-15])
checkSU(x_eval): (tensor[8] f64 x∈[2.127e-16, 3.218e-16] μ=2.884e-16 σ=3.627e-17 [2.857e-16, 3.218e-16, 2.788e-16, 3.113e-16, 2.687e-16, 3.149e-16, 2.127e-16, 3.137e-16], tensor[8] f64 x∈[7.849e-16, 9.910e-16] μ=8.971e-16 σ=8.549e-17 [8.916e-16, 9.910e-16, 8.248e-16, 9.339e-16, 7.849e-16, 9.878e-16, 7.966e-16, 9.666e-16])
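
With job_type='hmc' the networks are bypassed and evaluate runs plain HMC with fixed step size eps=0.1 and nleapfrog=8. For orientation, one generic HMC trajectory looks like the sketch below (flat-space leapfrog with a hypothetical action callable; the real SU(3) update is multiplicative, via the exponential map on the Lie algebra):

def hmc_step(x0: torch.Tensor, action, eps: float = 0.1, nleapfrog: int = 8):
    # One trajectory: momentum refresh -> leapfrog integration -> Metropolis test.
    grad = lambda y: torch.autograd.grad(action(y).sum(), y)[0]
    x = x0.detach().clone().requires_grad_(True)
    v = torch.randn_like(x)
    h0 = action(x).sum() + 0.5 * (v ** 2).sum()          # initial Hamiltonian
    v = v - 0.5 * eps * grad(x)                          # half-step momentum
    for _ in range(nleapfrog):
        x = (x + eps * v).detach().requires_grad_(True)  # full-step position
        v = v - eps * grad(x)                            # full-step momentum
    v = v + 0.5 * eps * grad(x)                          # undo half of the final kick
    h1 = action(x).sum() + 0.5 * (v ** 2).sum()          # final Hamiltonian
    accept = torch.rand(()) < torch.exp(h0 - h1)         # Metropolis-Hastings test
    return x.detach() if accept else x0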

Training

import time
from l2hmc.utils.history import BaseHistory, summarize_dict

history_train = BaseHistory()
x = state.x
for step in range(100):
    # logger.info(f'TRAIN STEP: {step}')
    tic = time.perf_counter()
    x, metrics_ = ptExpSU3.trainer.train_step(
        (x, state.beta)
    )
    toc = time.perf_counter()
    metrics = {
        'train_step': step,
        'dt': toc - tic,
        **metrics_,
    }
    if step % 5 == 0:
        avgs = history_train.update(metrics)
        summary = summarize_dict(avgs)
        logger.info(summary)
[2025-07-24 16:57:07][I][ipykernel_8749/30352159:21:ezpz.log] train_step=0 dt=3.215 energy=43.378 logprob=43.212 logdet=0.166 sldf=0.085 sldb=-0.052 sld=0.166 xeps=0.010 veps=0.010 acc=0.088 sumlogdet=0.000 beta=6.000 acc_mask=0.000 loss=186.887 plaqs=0.001 sinQ=0.000 intQ=0.003 dQint=0.000 dQsin=0.000
[2025-07-24 16:57:19][I][ipykernel_8749/30352159:21:ezpz.log] train_step=5 dt=2.420 energy=-291.052 logprob=-291.294 logdet=0.243 sldf=0.138 sldb=-0.115 sld=0.243 xeps=0.010 veps=0.010 acc=0.947 sumlogdet=0.087 beta=6.000 acc_mask=1.000 loss=-773.032 plaqs=0.037 sinQ=0.000 intQ=0.005 dQint=0.024 dQsin=0.002
[2025-07-24 16:57:30][I][ipykernel_8749/30352159:21:ezpz.log] train_step=10 dt=2.087 energy=-748.515 logprob=-748.776 logdet=0.261 sldf=0.162 sldb=-0.158 sld=0.261 xeps=0.011 veps=0.010 acc=1.000 sumlogdet=0.011 beta=6.000 acc_mask=1.000 loss=-677.595 plaqs=0.085 sinQ=0.001 intQ=0.017 dQint=0.011 dQsin=0.001
[2025-07-24 16:57:40][I][ipykernel_8749/30352159:21:ezpz.log] train_step=15 dt=1.877 energy=-1196.402 logprob=-1196.631 logdet=0.229 sldf=0.156 sldb=-0.182 sld=0.229 xeps=0.011 veps=0.010 acc=0.763 sumlogdet=-0.050 beta=6.000 acc_mask=0.750 loss=-261.805 plaqs=0.133 sinQ=0.002 intQ=0.026 dQint=0.008 dQsin=0.001
[2025-07-24 16:57:55][I][ipykernel_8749/30352159:21:ezpz.log] train_step=20 dt=4.560 energy=-1537.655 logprob=-1537.979 logdet=0.325 sldf=0.211 sldb=-0.228 sld=0.325 xeps=0.012 veps=0.010 acc=0.815 sumlogdet=-0.050 beta=6.000 acc_mask=0.750 loss=-241.241 plaqs=0.166 sinQ=0.002 intQ=0.033 dQint=0.013 dQsin=0.001
[2025-07-24 16:58:16][I][ipykernel_8749/30352159:21:ezpz.log] train_step=25 dt=5.056 energy=-1845.867 logprob=-1846.266 logdet=0.399 sldf=0.259 sldb=-0.281 sld=0.399 xeps=0.012 veps=0.010 acc=0.883 sumlogdet=-0.019 beta=6.000 acc_mask=0.875 loss=-275.480 plaqs=0.196 sinQ=0.001 intQ=0.017 dQint=0.015 dQsin=0.001
[2025-07-24 16:58:47][I][ipykernel_8749/30352159:21:ezpz.log] train_step=30 dt=4.821 energy=-2007.591 logprob=-2008.213 logdet=0.622 sldf=0.376 sldb=-0.357 sld=0.622 xeps=0.013 veps=0.010 acc=0.647 sumlogdet=0.029 beta=6.000 acc_mask=0.625 loss=-132.897 plaqs=0.220 sinQ=0.001 intQ=0.012 dQint=0.010 dQsin=0.001
[2025-07-24 16:59:05][I][ipykernel_8749/30352159:21:ezpz.log] train_step=35 dt=6.179 energy=-2254.809 logprob=-2255.430 logdet=0.621 sldf=0.374 sldb=-0.344 sld=0.621 xeps=0.013 veps=0.011 acc=0.574 sumlogdet=0.073 beta=6.000 acc_mask=0.625 loss=-105.081 plaqs=0.238 sinQ=-0.001 intQ=-0.009 dQint=0.010 dQsin=0.001
[2025-07-24 16:59:25][I][ipykernel_8749/30352159:21:ezpz.log] train_step=40 dt=4.264 energy=-2298.236 logprob=-2299.017 logdet=0.781 sldf=0.493 sldb=-0.510 sld=0.781 xeps=0.013 veps=0.011 acc=0.456 sumlogdet=0.010 beta=6.000 acc_mask=0.375 loss=91.055 plaqs=0.249 sinQ=-0.001 intQ=-0.018 dQint=0.008 dQsin=0.001
[2025-07-24 16:59:38][I][ipykernel_8749/30352159:21:ezpz.log] train_step=45 dt=2.303 energy=-2383.401 logprob=-2384.326 logdet=0.925 sldf=0.571 sldb=-0.567 sld=0.925 xeps=0.013 veps=0.011 acc=1.000 sumlogdet=0.059 beta=6.000 acc_mask=1.000 loss=-263.916 plaqs=0.265 sinQ=-0.000 intQ=-0.004 dQint=0.016 dQsin=0.001
[2025-07-24 16:59:49][I][ipykernel_8749/30352159:21:ezpz.log] train_step=50 dt=1.954 energy=-2637.445 logprob=-2638.384 logdet=0.939 sldf=0.596 sldb=-0.621 sld=0.939 xeps=0.013 veps=0.012 acc=0.789 sumlogdet=-0.098 beta=6.000 acc_mask=0.750 loss=-238.576 plaqs=0.283 sinQ=0.000 intQ=0.003 dQint=0.015 dQsin=0.001
[2025-07-24 16:59:59][I][ipykernel_8749/30352159:21:ezpz.log] train_step=55 dt=1.719 energy=-2780.312 logprob=-2781.590 logdet=1.278 sldf=0.777 sldb=-0.748 sld=1.278 xeps=0.013 veps=0.012 acc=0.879 sumlogdet=0.131 beta=6.000 acc_mask=0.875 loss=-233.818 plaqs=0.302 sinQ=0.002 intQ=0.027 dQint=0.013 dQsin=0.001
[2025-07-24 17:00:13][I][ipykernel_8749/30352159:21:ezpz.log] train_step=60 dt=3.844 energy=-2937.865 logprob=-2939.211 logdet=1.346 sldf=0.840 sldb=-0.851 sld=1.346 xeps=0.013 veps=0.013 acc=1.000 sumlogdet=0.012 beta=6.000 acc_mask=1.000 loss=-319.141 plaqs=0.321 sinQ=-0.000 intQ=-0.003 dQint=0.016 dQsin=0.001
[2025-07-24 17:00:29][I][ipykernel_8749/30352159:21:ezpz.log] train_step=65 dt=3.087 energy=-3120.763 logprob=-3122.055 logdet=1.291 sldf=0.831 sldb=-0.885 sld=1.291 xeps=0.013 veps=0.013 acc=1.000 sumlogdet=-0.155 beta=6.000 acc_mask=1.000 loss=-435.038 plaqs=0.340 sinQ=-0.000 intQ=-0.004 dQint=0.017 dQsin=0.001
[2025-07-24 17:00:57][I][ipykernel_8749/30352159:21:ezpz.log] train_step=70 dt=3.675 energy=-3374.210 logprob=-3375.497 logdet=1.287 sldf=0.796 sldb=-0.789 sld=1.287 xeps=0.013 veps=0.014 acc=1.000 sumlogdet=0.066 beta=6.000 acc_mask=1.000 loss=-481.912 plaqs=0.362 sinQ=-0.001 intQ=-0.019 dQint=0.013 dQsin=0.001
[2025-07-24 17:01:09][I][ipykernel_8749/30352159:21:ezpz.log] train_step=75 dt=2.762 energy=-3472.199 logprob=-3473.594 logdet=1.395 sldf=0.868 sldb=-0.872 sld=1.395 xeps=0.013 veps=0.014 acc=1.000 sumlogdet=0.047 beta=6.000 acc_mask=1.000 loss=-622.358 plaqs=0.380 sinQ=0.000 intQ=0.002 dQint=0.013 dQsin=0.001
[2025-07-24 17:01:19][I][ipykernel_8749/30352159:21:ezpz.log] train_step=80 dt=2.162 energy=-3703.436 logprob=-3704.769 logdet=1.333 sldf=0.817 sldb=-0.792 sld=1.333 xeps=0.012 veps=0.015 acc=1.000 sumlogdet=0.109 beta=6.000 acc_mask=1.000 loss=-598.133 plaqs=0.401 sinQ=0.001 intQ=0.016 dQint=0.011 dQsin=0.001
[2025-07-24 17:01:29][I][ipykernel_8749/30352159:21:ezpz.log] train_step=85 dt=1.962 energy=-3909.530 logprob=-3910.754 logdet=1.224 sldf=0.740 sldb=-0.695 sld=1.224 xeps=0.012 veps=0.015 acc=1.000 sumlogdet=0.198 beta=6.000 acc_mask=1.000 loss=-443.851 plaqs=0.423 sinQ=0.001 intQ=0.014 dQint=0.019 dQsin=0.001
[2025-07-24 17:01:43][I][ipykernel_8749/30352159:21:ezpz.log] train_step=90 dt=2.470 energy=-4057.845 logprob=-4059.013 logdet=1.167 sldf=0.743 sldb=-0.774 sld=1.167 xeps=0.012 veps=0.016 acc=1.000 sumlogdet=-0.073 beta=6.000 acc_mask=1.000 loss=-580.790 plaqs=0.441 sinQ=-0.001 intQ=-0.010 dQint=0.019 dQsin=0.001
[2025-07-24 17:01:55][I][ipykernel_8749/30352159:21:ezpz.log] train_step=95 dt=2.325 energy=-4236.244 logprob=-4237.616 logdet=1.372 sldf=0.822 sldb=-0.760 sld=1.372 xeps=0.012 veps=0.016 acc=1.000 sumlogdet=0.241 beta=6.000 acc_mask=1.000 loss=-779.546 plaqs=0.462 sinQ=-0.000 intQ=-0.002 dQint=0.014 dQsin=0.001
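
The loss logged above is the L2HMC objective: it rewards proposals that are both likely to be accepted and far from where they started (an acceptance-weighted expected squared jump). With this config (use_mixed_loss=true, plaq_weight=0.1, rmse_weight=0.1, charge_weight=0.0) the jump is measured in the plaquettes rather than the topological charge. A hedged sketch of the plaquette term (illustrative only, not the trainer's exact code):

def plaq_loss(plaq0: torch.Tensor, plaq1: torch.Tensor, acc: torch.Tensor) -> torch.Tensor:
    # Acceptance-weighted squared change in the plaquette, per chain;
    # the minus sign means larger accepted moves -> lower loss.
    dp2 = acc * (plaq1 - plaq0) ** 2
    return -dp2.mean()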
dataset_train = history_train.get_dataset()
_ = history_train.plot_all(
    title='Train',
    num_chains=x.shape[0],
)

Evaluation

# state = ptExpSU3.trainer.dynamics.random_state(6.0)
xeval, history_eval = evaluate(
    nsteps=50,
    exp=ptExpSU3,
    beta=6.0,
    # x=state.x,
    job_type='eval',
    nlog=1,
    nprint=50,
    grab=True,
)
[2025-07-24 17:02:12][I][pytorch/experiment:117:l2hmc.experiment.pytorch.experiment] Running 50 steps of eval at beta=6.0000
[2025-07-24 17:02:12][I][pytorch/experiment:121:l2hmc.experiment.pytorch.experiment] STEP: 0
[2025-07-24 17:02:13][I][pytorch/experiment:121:l2hmc.experiment.pytorch.experiment] STEP: 1
... (STEP: 2 through STEP: 48 elided; ~0.7 s / step) ...
[2025-07-24 17:02:48][I][pytorch/experiment:121:l2hmc.experiment.pytorch.experiment] STEP: 49
dataset_eval = history_eval.get_dataset()
_ = history_eval.plot_all(title='Eval')

xeval = ptExpSU3.trainer.dynamics.unflatten(xeval)
logger.info(f"checkSU(x_eval): {g.checkSU(xeval)}")
logger.info(f"checkSU(x_eval): {g.checkSU(g.projectSU(xeval))}")
[2025-07-24 17:02:55][I][ipykernel_8749/2193937887:2:ezpz.log] checkSU(x_eval): (tensor[8] f64 x∈[1.371e-16, 1.482e-16] μ=1.442e-16 σ=3.261e-18 [1.441e-16, 1.438e-16, 1.454e-16, 1.482e-16, 1.435e-16, 1.452e-16, 1.463e-16, 1.371e-16], tensor[8] f64 x∈[5.500e-16, 8.633e-16] μ=7.158e-16 σ=9.786e-17 [5.500e-16, 7.450e-16, 7.931e-16, 7.459e-16, 8.633e-16, 6.168e-16, 7.130e-16, 6.992e-16])
[2025-07-24 17:02:55][I][ipykernel_8749/2193937887:3:ezpz.log] checkSU(x_eval): (tensor[8] f64 x∈[1.350e-16, 1.491e-16] μ=1.421e-16 σ=5.421e-18 [1.396e-16, 1.464e-16, 1.404e-16, 1.440e-16, 1.491e-16, 1.351e-16, 1.473e-16, 1.350e-16], tensor[8] f64 x∈[5.135e-16, 7.909e-16] μ=6.914e-16 σ=9.388e-17 [7.909e-16, 7.453e-16, 7.450e-16, 5.135e-16, 7.644e-16, 6.084e-16, 6.504e-16, 7.130e-16])
import matplotlib.pyplot as plt
pdiff = dataset_eval.plaqs - dataset_hmc.plaqs
pdiff
import xarray as xr

fig, ax = plt.subplots(figsize=(12, 4))
(pdiff ** 2).plot(ax=ax)  #, robust=True)
ax.set_title(r"$\left|\delta U_{\mu\nu}\right|^{2}$ (Eval $-$ HMC)")
outfile = Path(EVAL_DIR).joinpath('pdiff.svg')
fig.savefig(outfile.as_posix(), dpi=400, bbox_inches='tight')
plt.show()
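
Since pdiff is an xarray DataArray (chain × draw), it reduces directly; for example, an RMS plaquette discrepancy between the two samplers (a hypothetical follow-up cell, output not shown):

rms = float(np.sqrt((pdiff ** 2).mean()))
print(f"RMS plaquette difference (eval vs. hmc): {rms:.3e}")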

Citation

BibTeX citation:
@online{foreman2025,
  author = {Foreman, Sam},
  title = {🔳 `L2hmc-Qcd` {Example:} {4D} {SU(3)}},
  date = {2025-07-24},
  url = {https://samforeman.me/posts/jupyter/l2hmc-4dSU3/},
  langid = {en}
}
For attribution, please cite this work as:
Foreman, Sam. 2025. “🔳 `L2hmc-Qcd` Example: 4D SU(3).” July 24, 2025. https://samforeman.me/posts/jupyter/l2hmc-4dSU3/.