🔳 l2hmc-qcd Example: 4D SU(3)

LQCD · MCMC · HMC · SU3 · l2hmc

Author: Sam Foreman
Published: July 24, 2024
Modified: January 8, 2026

# %matplotlib inline
import matplotlib_inline
matplotlib_inline.backend_inline.set_matplotlib_formats('svg')
import os
os.environ['COLORTERM'] = 'truecolor'
import lovely_tensors as lt
lt.monkey_patch()
lt.set_config(color=False)
# automatically detect and reload local changes to modules
%load_ext autoreload
%autoreload 2
import ezpz
import numpy as np
import matplotlib.pyplot as plt
from l2hmc.utils.plot_helpers import FigAxes
import ambivalent
plt.style.use(ambivalent.STYLES['ambivalent'])
#set_plot_style()
Using device: cpu
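
The lt.monkey_patch() call above swaps in lovely-tensors' compact repr, which is why tensors later in this notebook print as one-line summaries (shape, dtype, range, mean, std) rather than walls of numbers. A quick illustration (random values; the exact summary format is the one visible in the outputs below):

import torch
t = torch.randn(8, dtype=torch.float64)
print(t)  # prints e.g. "tensor[8] f64 x∈[...] μ=... σ=... [...]" instead of the raw values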
import ezpz
from pathlib import Path
from typing import Optional
from rich import print

import lovely_tensors as lt
import matplotlib.pyplot as plt
import numpy as np
import torch
import yaml

# from l2hmc.utils.dist import setup_torch
seed = np.random.randint(2 ** 32)
print(f"seed: {seed}")

_ = ezpz.setup_torch(seed=seed)
torch.set_default_dtype(torch.float64)
# _ = setup_torch(precision='float64', backend='DDP', seed=seed, port='1234')

logger = ezpz.get_logger()

import l2hmc.group.su3.pytorch.group as g
# from l2hmc.utils.rich import get_console
from l2hmc.common import grab_tensor, print_dict
from l2hmc.configs import dict_to_list_of_overrides, get_experiment
from l2hmc.experiment.pytorch.experiment import Experiment, evaluate  # noqa
seed: 1712078466
Manually specifying seed=1712078466
Using [1 / 1] available "mps" devices !!
[2025-12-31 11:26:25,467] [INFO] [real_accelerator.py:239:get_accelerator] Setting ds_accelerator to mps (auto detect)
[rank0]:W1231 11:26:25.645000 27083 torch/distributed/elastic/multiprocessing/redirects.py:29] NOTE: Redirects are currently not supported in Windows or MacOs.
from l2hmc.utils.plot_helpers import (  # noqa
    plot_scalar,
    plot_chains,
    plot_leapfrogs
)

def savefig(fig: plt.Figure, fname: str, outdir: os.PathLike):
    pngfile = Path(outdir).joinpath(f"pngs/{fname}.png")
    svgfile = Path(outdir).joinpath(f"svgs/{fname}.svg")
    pngfile.parent.mkdir(exist_ok=True, parents=True)
    svgfile.parent.mkdir(exist_ok=True, parents=True)
    fig.savefig(svgfile, transparent=True, bbox_inches='tight')
    fig.savefig(pngfile, transparent=True, bbox_inches='tight', dpi=300)

def plot_metrics(metrics: dict, title: Optional[str] = None, **kwargs):
    outdir = Path(f"./plots-4dSU3/{title}")
    outdir.mkdir(exist_ok=True, parents=True)
    for key, val in metrics.items():
        fig, ax = plot_metric(val, name=key, **kwargs)
        if title is not None:
            ax.set_title(title)
        logger.info(f"Saving {key} to {outdir}")  # `console` was never defined; use the ezpz logger
        savefig(fig, f"{key}", outdir=outdir)
        plt.show()

def plot_metric(
        metric: torch.Tensor,
        name: Optional[str] = None,
        **kwargs,
):
    assert len(metric) > 0
    if isinstance(metric[0], (int, float, bool, np.floating)):
        y = np.stack(metric)
        return plot_scalar(y, ylabel=name, **kwargs)
    element_shape = metric[0].shape
    if len(element_shape) == 2:
        y = grab_tensor(torch.stack(metric))
        return plot_leapfrogs(y, ylabel=name)
    if len(element_shape) == 1:
        y = grab_tensor(torch.stack(metric))
        return plot_chains(y, ylabel=name, **kwargs)
    if len(element_shape) == 0:
        y = grab_tensor(torch.stack(metric))
        return plot_scalar(y, ylabel=name, **kwargs)
    raise ValueError(f"Unexpected metric element shape: {element_shape}")
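
plot_metric dispatches on the shape of each recorded entry: scalars go to plot_scalar, per-chain vectors to plot_chains, and per-leapfrog matrices to plot_leapfrogs. A minimal sketch of calling it on a synthetic per-chain metric (illustrative only; fake_metric is made up):

# 20 recorded steps, 8 chains each -> element shape (8,), routed to plot_chains
fake_metric = [torch.rand(8) for _ in range(20)]
_ = plot_metric(fake_metric, name='fake_metric')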

Load config + build Experiment

from rich import print

from l2hmc.configs import CONF_DIR
su3conf = Path(f"{CONF_DIR}/su3test.yaml")
with su3conf.open('r') as stream:
    conf = dict(yaml.safe_load(stream))
import json
from rich import print_json
print_json(json.dumps(conf, indent=4, sort_keys=True))
overrides = dict_to_list_of_overrides(conf)
{
  "annealing_schedule": {
    "beta_final": 6.0,
    "beta_init": 6.0
  },
  "backend": "DDP",
  "conv": "none",
  "dynamics": {
    "eps": 0.01,
    "eps_fixed": false,
    "group": "SU3",
    "latvolume": [
      4,
      4,
      4,
      4
    ],
    "merge_directions": true,
    "nchains": 8,
    "nleapfrog": 4,
    "use_separate_networks": false,
    "use_split_xnets": false,
    "verbose": true
  },
  "framework": "pytorch",
  "init_aim": false,
  "init_wandb": false,
  "learning_rate": {
    "clip_norm": 1.0,
    "lr_init": "1e-04"
  },
  "loss": {
    "aux_weight": 0.0,
    "charge_weight": 0.0,
    "plaq_weight": 0.1,
    "rmse_weight": 0.1,
    "use_mixed_loss": true
  },
  "net_weights": {
    "v": {
      "q": 1.0,
      "s": 1.0,
      "t": 1.0
    },
    "x": {
      "q": 1.0,
      "s": 0.0,
      "t": 1.0
    }
  },
  "network": {
    "activation_fn": "tanh",
    "dropout_prob": 0.0,
    "units": [
      256
    ],
    "use_batch_norm": false
  },
  "restore": false,
  "save": false,
  "steps": {
    "log": 1,
    "nepoch": 10,
    "nera": 1,
    "print": 1,
    "test": 50
  },
  "use_tb": false,
  "use_wandb": false
}
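
dict_to_list_of_overrides flattens the nested dict above into Hydra-style dotted overrides for get_experiment to consume. A sketch of what the first few entries presumably look like (format assumed from Hydra conventions; exact ordering may differ):

print(overrides[:3])
# e.g. ['annealing_schedule.beta_final=6.0', 'annealing_schedule.beta_init=6.0', 'backend=DDP']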
ptExpSU3 = get_experiment(overrides=[*overrides], build_networks=True)

# console.print(ptExpSU3.config)
state = ptExpSU3.trainer.dynamics.random_state(6.0)
logger.info(f"checkSU(state.x): {g.checkSU(state.x)}")
logger.info(f"checkSU(state.x): {g.checkSU(g.projectSU(state.x))}")
assert isinstance(state.x, torch.Tensor)
assert isinstance(state.beta, torch.Tensor)
assert isinstance(ptExpSU3, Experiment)
[2025-12-31 11:26:27,740564][I][utils/dist:229:setup_torch_DDP] Caught MASTER_PORT:55151 from environment!

[2025-12-31 11:26:27,768772][I][utils/dist:229:setup_torch_DDP] Caught MASTER_PORT:55151 from environment!

[2025-12-31 11:26:27,769587][W][pytorch/trainer:470:warning] Using torch.float32 on cpu!

[2025-12-31 11:26:27,923110][W][pytorch/trainer:470:warning] Using `torch.optim.Adam` optimizer

[2025-12-31 11:26:27,924258][I][pytorch/trainer:308:count_parameters] num_params in model: 27880456

[2025-12-31 11:26:27,976310][W][pytorch/trainer:274:__init__] logging with freq 1 for wandb.watch

[2025-12-31 11:26:27,987321][I][ipykernel_27083/1455121896:5:<module>] checkSU(state.x): (tensor[8] f64 x∈[1.194e-14, 2.768e-13] μ=5.945e-14 σ=8.948e-14 [1.612e-14, 4.656e-14, 2.151e-14, 2.791e-14, 2.768e-13, 1.396e-14, 1.194e-14, 6.084e-14], tensor[8] f64 x∈[2.157e-13, 8.730e-12] μ=1.710e-12 σ=2.876e-12 [2.870e-13, 1.121e-12, 5.530e-13, 8.653e-13, 8.730e-12, 2.157e-13, 3.150e-13, 1.589e-12])

[2025-12-31 11:26:27,993635][I][ipykernel_27083/1455121896:6:<module>] checkSU(state.x): (tensor[8] f64 x∈[2.674e-16, 2.908e-16] μ=2.781e-16 σ=8.187e-18 [2.799e-16, 2.843e-16, 2.674e-16, 2.908e-16, 2.722e-16, 2.683e-16, 2.798e-16, 2.821e-16], tensor[8] f64 x∈[8.691e-16, 9.443e-16] μ=9.076e-16 σ=2.875e-17 [9.138e-16, 9.269e-16, 8.691e-16, 9.443e-16, 8.911e-16, 8.903e-16, 9.437e-16, 8.816e-16])
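
Judging from the paired tensors in the output above, checkSU appears to report two per-chain deviations: how far each link is from unitarity and how far its determinant is from 1. A rough, illustrative sketch of those two quantities for a single 3×3 link, under that assumption (not the library's actual implementation):

def check_su3_link(u: torch.Tensor):
    """Illustrative only: deviation of one 3x3 link from U†U = I and from det U = 1."""
    eye = torch.eye(3, dtype=u.dtype, device=u.device)
    d_unitary = (u @ u.adjoint() - eye).abs().max()
    d_det = (torch.linalg.det(u) - 1.0).abs()
    return d_unitary, d_det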
# from l2hmc.utils.plot_helpers import set_plot_style
# set_plot_style()

from l2hmc.common import get_timestamp
TSTAMP = get_timestamp()
OUTPUT_DIR = Path(f"./outputs/pt4dSU3/{TSTAMP}")
HMC_DIR = OUTPUT_DIR.joinpath('hmc')
EVAL_DIR = OUTPUT_DIR.joinpath('eval')
TRAIN_DIR = OUTPUT_DIR.joinpath('train')
HMC_DIR.mkdir(exist_ok=True, parents=True)
EVAL_DIR.mkdir(exist_ok=True, parents=True)
TRAIN_DIR.mkdir(exist_ok=True, parents=True)
ptExpSU3.trainer.print_grads_and_weights()
logger.info(ptExpSU3.config)
#console.print(ptExpSU3.config)
[2025-12-31 11:26:28,108404][I][pytorch/trainer:2003:print_grads_and_weights] --------------------------------------------------------------------------------
[2025-12-31 11:26:28,109288][I][pytorch/trainer:2004:print_grads_and_weights] GRADS:
[2025-12-31 11:26:28,110016][I][l2hmc/common:97:print_dict] networks.xnet.input_layer.xlayer.weight: None
networks.xnet.input_layer.xlayer.bias: None
networks.xnet.input_layer.vlayer.weight: None
networks.xnet.input_layer.vlayer.bias: None
networks.xnet.scale.coeff: None
networks.xnet.scale.layer.weight: None
networks.xnet.scale.layer.bias: None
networks.xnet.transf.coeff: None
networks.xnet.transf.layer.weight: None
networks.xnet.transf.layer.bias: None
networks.xnet.transl.weight: None
networks.xnet.transl.bias: None
networks.vnet.input_layer.xlayer.weight: None
networks.vnet.input_layer.xlayer.bias: None
networks.vnet.input_layer.vlayer.weight: None
networks.vnet.input_layer.vlayer.bias: None
networks.vnet.scale.coeff: None
networks.vnet.scale.layer.weight: None
networks.vnet.scale.layer.bias: None
networks.vnet.transf.coeff: None
networks.vnet.transf.layer.weight: None
networks.vnet.transf.layer.bias: None
networks.vnet.transl.weight: None
networks.vnet.transl.bias: None
xeps.0: None
xeps.1: None
xeps.2: None
xeps.3: None
veps.0: None
veps.1: None
veps.2: None
veps.3: None
[2025-12-31 11:26:28,112821][I][pytorch/trainer:2006:print_grads_and_weights] --------------------------------------------------------------------------------
[2025-12-31 11:26:28,113314][I][pytorch/trainer:2007:print_grads_and_weights] WEIGHTS:
[2025-12-31 11:26:28,119057][I][l2hmc/common:97:print_dict] networks.xnet.input_layer.xlayer.weight: torch.Size([256, 18432]) torch.float64
[[-0.005092    0.00446629  0.00064155 ... -0.00616387  0.0066668  -0.00154245]
 ...]
networks.xnet.input_layer.xlayer.bias: torch.Size([256]) torch.float64
[ 4.68115234e-04 -2.05326698e-04 -7.12439050e-03  4.68288458e-03 ...]
networks.xnet.input_layer.vlayer.weight: torch.Size([256, 18432]) torch.float64
[[-5.42508209e-03 -2.40978725e-03  2.93031501e-03 ...  2.74961026e-03  6.60658633e-05  6.94622612e-03]
 ...]
networks.xnet.input_layer.vlayer.bias: torch.Size([256]) torch.float64
[ 1.71765005e-03  3.85533676e-03 -4.15978583e-03 -3.43859808e-03 ...]
networks.xnet.scale.coeff: torch.Size([1, 9216]) torch.float64
[[0. 0. 0. ... 0. 0. 0.]]
networks.xnet.scale.layer.weight: torch.Size([9216, 256]) torch.float64
[[ 0.04735526 -0.00647193  0.02590768 ...  0.03472903  0.03672338 -0.00576106]
 ...]
networks.xnet.scale.layer.bias: torch.Size([9216]) torch.float64
[-0.03057832  0.03785151  0.00366509 ...  0.0089849  -0.0351477  -0.04956403]
networks.xnet.transf.coeff: torch.Size([1, 9216]) torch.float64
[[0. 0. 0. ... 0. 0. 0.]]
networks.xnet.transf.layer.weight: torch.Size([9216, 256]) torch.float64
[[ 0.01252641  0.00852528  0.06125848 ... -0.02491388  0.00764917  0.05575879]
 ...]
networks.xnet.transf.layer.bias: torch.Size([9216]) torch.float64
[ 0.01300957 -0.06210055 -0.00435404 ... -0.05094867  0.01183965  0.020364  ]
networks.xnet.transl.weight: torch.Size([9216, 256]) torch.float64
[[ 0.0231374   0.06228965 -0.03393573 ...  0.05750693  0.00768673 -0.01831028]
 ...]
networks.xnet.transl.bias: torch.Size([9216]) torch.float64
[-0.0613981  -0.03142251  0.03007123 ...  0.04145251  0.00411679 -0.00884496]
networks.vnet.input_layer.xlayer.weight: torch.Size([256, 8192]) torch.float64
[[-0.0048561  -0.00348114 -0.00083025 ...  0.00762385 -0.00616286 -0.00269964]
 ...]
networks.vnet.input_layer.xlayer.bias: torch.Size([256]) torch.float64
[-0.00014791 -0.00345238 -0.00270097 -0.00868501 ...]
networks.vnet.input_layer.vlayer.weight: torch.Size([256, 8192]) torch.float64
[[ 0.00848334  0.00354643  0.00712743 ...  0.00929387  0.00689615  0.0085438 ]
 ...]
networks.vnet.input_layer.vlayer.bias: torch.Size([256]) torch.float64
[-2.87727112e-03  2.55530058e-03  1.16936785e-03  6.51925949e-03 ...]
networks.vnet.scale.coeff: torch.Size([1, 9216]) torch.float64
[[0. 0. 0. ... 0. 0. 0.]]
networks.vnet.scale.layer.weight: torch.Size([9216, 256]) torch.float64
[[-0.02993077  0.02543806 -0.00644304 ... -0.05921393 -0.05908895  0.01354392]
 ...]
networks.vnet.scale.layer.bias: torch.Size([9216]) torch.float64
[ 0.02491976 -0.02458543  0.00860941 ... -0.04368608 -0.01910094 -0.03452589]
networks.vnet.transf.coeff: torch.Size([1, 9216]) torch.float64
[[0. 0. 0. ... 0. 0. 0.]]
networks.vnet.transf.layer.weight: torch.Size([9216, 256]) torch.float64
[[-0.02025562 -0.03779622 -0.06118175 ...  0.0361354  -0.00637488  0.03643653]
 ...]
networks.vnet.transf.layer.bias: torch.Size([9216]) torch.float64
[ 0.01004734 -0.01753179  0.00080507 ...  0.05718709 -0.04688783  0.00542994]
networks.vnet.transl.weight: torch.Size([9216, 256]) torch.float64
[[-0.0385665   0.02053248 -0.0428989  ... -0.01126615  0.02928948  0.05796018]
 ...]
networks.vnet.transl.bias: torch.Size([9216]) torch.float64
[-0.02382432 -0.05869707  0.02771799 ... -0.05022913  0.05177271  0.01605222]
xeps.0: torch.Size([]) torch.float64 0.01
xeps.1: torch.Size([]) torch.float64 0.01
xeps.2: torch.Size([]) torch.float64 0.01
xeps.3: torch.Size([]) torch.float64 0.01
veps.0: torch.Size([]) torch.float64 0.01
veps.1: torch.Size([]) torch.float64 0.01
veps.2: torch.Size([]) torch.float64 0.01
veps.3: torch.Size([]) torch.float64 0.01
[2025-12-31 11:26:28,144379][I][pytorch/trainer:2009:print_grads_and_weights] --------------------------------------------------------------------------------

[2025-12-31 11:26:28,145087][I][ipykernel_27083/3178487732:2:<module>] ExperimentConfig(wandb={'setup': {'id': None, 'group': None, 'config': None, 'save_code': True, 'sync_tensorboard': True, 'mode': 'online', 'resume': 'allow', 'entity': 'l2hmc-qcd', 'project': 'l2hmc-qcd', 'settings': {'start_method': 'thread'}, 'tags': ['beta_init=6.0', 'beta_final=6.0']}}, steps=Steps(nera=1, nepoch=10, test=50, log=1, print=1, extend_last_era=1), framework='pytorch', loss=LossConfig(use_mixed_loss=True, charge_weight=0.0, rmse_weight=0.1, plaq_weight=0.1, aux_weight=0.0), network=NetworkConfig(units=[256], activation_fn='tanh', dropout_prob=0.0, use_batch_norm=False), conv=ConvolutionConfig(filters=[], sizes=[], pool=[]), net_weights=NetWeights(x=NetWeight(s=0.0, t=1.0, q=1.0), v=NetWeight(s=1.0, t=1.0, q=1.0)), dynamics=DynamicsConfig(nchains=8, group='SU3', latvolume=[4, 4, 4, 4], nleapfrog=4, eps=0.01, eps_hmc=0.25, use_ncp=True, verbose=True, eps_fixed=False, use_split_xnets=False, use_separate_networks=False, merge_directions=True), learning_rate=LearningRateConfig(lr_init=0.0001, mode='auto', monitor='loss', patience=5, cooldown=0, warmup=1000, verbose=True, min_lr=1e-06, factor=0.98, min_delta=0.0001, clip_norm=1.0), annealing_schedule=AnnealingSchedule(beta_init=6.0, beta_final=6.0, dynamic=False), gradient_accumulation_steps=1, restore=False, save=False, c1=0.0, port='2345', compile=True, profile=False, init_aim=False, init_wandb=False, use_wandb=False, use_tb=False, debug_mode=False, default_mode=True, print_config=True, precision='float32', ignore_warnings=True, backend='DDP', seed=9992, ds_config_path='/Users/samforeman/projects/saforem2/l2hmc-qcd/src/l2hmc/conf/ds_config.yaml', name=None, width=200, nchains=None, compression=False)
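
The config printed above is a nested dataclass, so individual fields should be reachable by attribute access (names read directly off the repr; a quick sanity check, assuming that structure):

print(ptExpSU3.config.dynamics.nleapfrog)             # 4
print(ptExpSU3.config.dynamics.latvolume)             # [4, 4, 4, 4]
print(ptExpSU3.config.annealing_schedule.beta_final)  # 6.0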

HMC

xhmc, history_hmc = evaluate(
    nsteps=50,
    exp=ptExpSU3,
    beta=6.0,
    x=state.x,
    eps=0.1,
    nleapfrog=8,
    job_type='hmc',
    nlog=1,
    nprint=50,
    grab=True
)
[2025-12-31 11:26:28,529808][I][pytorch/experiment:117:evaluate] Running 50 steps of hmc at beta=6.0000
[2025-12-31 11:26:28,531488][I][pytorch/experiment:121:evaluate] STEP: 0
[2025-12-31 11:26:29,035175][I][pytorch/experiment:121:evaluate] STEP: 1
[2025-12-31 11:26:29,407354][I][pytorch/experiment:121:evaluate] STEP: 2
...
[2025-12-31 11:26:40,768891][I][pytorch/experiment:121:evaluate] STEP: 48
[2025-12-31 11:26:40,966295][I][pytorch/experiment:121:evaluate] STEP: 49
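
With eps=0.1 and nleapfrog=8, each HMC trajectory integrates for 0.1 × 8 = 0.8 molecular-dynamics time units. Each trajectory is a standard leapfrog integration; a generic sketch on a flat phase space (the real updates act on SU(3) links via group-valued exponentials, so this is illustrative only):

def leapfrog(x, v, eps, nleapfrog, force):
    """Generic leapfrog: half-step in v, alternating full steps, closing half-step in v."""
    v = v + 0.5 * eps * force(x)
    for _ in range(nleapfrog - 1):
        x = x + eps * v
        v = v + eps * force(x)
    x = x + eps * v
    v = v + 0.5 * eps * force(x)
    return x, v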
dataset_hmc = history_hmc.get_dataset()
_ = history_hmc.plot_all(title='HMC')

xhmc = ptExpSU3.trainer.dynamics.unflatten(xhmc)
print(f"checkSU(x_eval): {g.checkSU(xhmc)}")
print(f"checkSU(x_eval): {g.checkSU(g.projectSU(xhmc))}")
checkSU(x_eval): (tensor[8] f64 x∈[2.759e-16, 5.593e-16] μ=4.229e-16 σ=1.337e-16 [5.593e-16, 5.518e-16, 5.378e-16, 3.061e-16, 2.875e-16, 2.759e-16, 3.254e-16, 5.396e-16], tensor[8] f64 x∈[8.414e-16, 1.735e-15] μ=1.268e-15 σ=4.126e-16 [1.735e-15, 1.680e-15, 1.559e-15, 9.106e-16, 8.414e-16, 8.581e-16, 9.336e-16, 1.629e-15])
checkSU(x_eval): (tensor[8] f64 x∈[2.685e-16, 3.260e-16] μ=2.976e-16 σ=2.434e-17 [3.136e-16, 3.204e-16, 3.144e-16, 2.965e-16, 2.691e-16, 2.726e-16, 2.685e-16, 3.260e-16], tensor[8] f64 x∈[7.638e-16, 1.021e-15] μ=8.975e-16 σ=8.129e-17 [9.340e-16, 9.442e-16, 9.334e-16, 8.559e-16, 7.638e-16, 8.173e-16, 9.106e-16, 1.021e-15])
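
Re-projecting with g.projectSU pulls the round-off accumulated over the trajectories back toward machine precision (compare the μ values in the two lines above, and the sharper drop from ~1e-13 to ~1e-16 for the initial random state earlier). A common way to project an arbitrary matrix to the nearest SU(3) element is to take the unitary polar factor and then divide out the determinant's phase; a sketch under that assumption (not necessarily how g.projectSU is implemented):

def project_su3_sketch(m: torch.Tensor) -> torch.Tensor:
    """Illustrative only: nearest-unitary polar factor, then rescale so det -> 1."""
    u, _, vh = torch.linalg.svd(m)
    w = u @ vh                                  # closest unitary matrix to m
    det = torch.linalg.det(w)
    phase_fix = torch.exp(-torch.log(det) / 3.0)  # principal cube root of 1/det
    return w * phase_fix                        # for a single 3x3 link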

Training

import time
from l2hmc.utils.history import BaseHistory, summarize_dict

history_train = BaseHistory()
x = state.x
for step in range(100):
    # log.info(f'HMC STEP: {step}')
    tic = time.perf_counter()
    x, metrics_ = ptExpSU3.trainer.train_step(
        (x, state.beta)
    )
    toc = time.perf_counter()
    metrics = {
        'train_step': step,
        'dt': toc - tic,
        **metrics_,
    }
    if step % 5 == 0:
        avgs = history_train.update(metrics)
        summary = summarize_dict(avgs)
        logger.info(summary)
[2025-12-31 11:26:45,774938][I][ipykernel_27083/30352159:21:<module>] train_step=0 dt=1.783 energy=16.488 logprob=16.428 logdet=0.060 sldf=0.048 sldb=-0.074 sld=0.060 xeps=0.010 veps=0.010 acc=0.259 sumlogdet=0.014 beta=6.000 acc_mask=0.125 loss=84.620 plaqs=-0.003 sinQ=0.004 intQ=0.063 dQint=0.010 dQsin=0.001

[2025-12-31 11:26:55,987458][I][ipykernel_27083/30352159:21:<module>] train_step=5 dt=1.938 energy=-352.911 logprob=-352.923 logdet=0.012 sldf=-0.008 sldb=0.039 sld=0.012 xeps=0.010 veps=0.010 acc=0.856 sumlogdet=0.111 beta=6.000 acc_mask=0.875 loss=-563.470 plaqs=0.039 sinQ=0.004 intQ=0.064 dQint=0.022 dQsin=0.001

[2025-12-31 11:27:06,156149][I][ipykernel_27083/30352159:21:<module>] train_step=10 dt=2.970 energy=-791.885 logprob=-791.874 logdet=-0.011 sldf=-0.001 sldb=-0.011 sld=-0.011 xeps=0.011 veps=0.010 acc=1.000 sumlogdet=-0.033 beta=6.000 acc_mask=1.000 loss=-525.716 plaqs=0.086 sinQ=0.003 intQ=0.044 dQint=0.021 dQsin=0.001

[2025-12-31 11:27:16,696312][I][ipykernel_27083/30352159:21:<module>] train_step=15 dt=2.311 energy=-1187.403 logprob=-1187.339 logdet=-0.064 sldf=-0.040 sldb=0.033 sld=-0.064 xeps=0.011 veps=0.010 acc=0.880 sumlogdet=0.027 beta=6.000 acc_mask=0.875 loss=-386.327 plaqs=0.130 sinQ=0.002 intQ=0.036 dQint=0.009 dQsin=0.001

[2025-12-31 11:27:28,358172][I][ipykernel_27083/30352159:21:<module>] train_step=20 dt=2.309 energy=-1478.071 logprob=-1478.001 logdet=-0.071 sldf=-0.047 sldb=0.056 sld=-0.071 xeps=0.012 veps=0.010 acc=0.814 sumlogdet=0.014 beta=6.000 acc_mask=0.750 loss=-303.861 plaqs=0.165 sinQ=0.003 intQ=0.041 dQint=0.020 dQsin=0.001

[2025-12-31 11:27:38,087055][I][ipykernel_27083/30352159:21:<module>] train_step=25 dt=1.907 energy=-1820.602 logprob=-1820.769 logdet=0.167 sldf=0.096 sldb=-0.084 sld=0.167 xeps=0.012 veps=0.010 acc=0.879 sumlogdet=0.056 beta=6.000 acc_mask=0.875 loss=-234.227 plaqs=0.195 sinQ=0.004 intQ=0.059 dQint=0.022 dQsin=0.001

[2025-12-31 11:27:48,223014][I][ipykernel_27083/30352159:21:<module>] train_step=30 dt=2.183 energy=-2009.020 logprob=-2009.196 logdet=0.176 sldf=0.103 sldb=-0.089 sld=0.176 xeps=0.013 veps=0.011 acc=0.792 sumlogdet=0.029 beta=6.000 acc_mask=0.750 loss=-213.443 plaqs=0.218 sinQ=0.001 intQ=0.018 dQint=0.007 dQsin=0.000

[2025-12-31 11:27:59,254749][I][ipykernel_27083/30352159:21:<module>] train_step=35 dt=2.014 energy=-2203.617 logprob=-2203.707 logdet=0.090 sldf=0.059 sldb=-0.062 sld=0.090 xeps=0.013 veps=0.011 acc=0.739 sumlogdet=0.007 beta=6.000 acc_mask=0.625 loss=-266.970 plaqs=0.239 sinQ=0.001 intQ=0.014 dQint=0.007 dQsin=0.001

[2025-12-31 11:28:09,621123][I][ipykernel_27083/30352159:21:<module>] train_step=40 dt=2.192 energy=-2368.273 logprob=-2368.449 logdet=0.176 sldf=0.098 sldb=-0.072 sld=0.176 xeps=0.013 veps=0.011 acc=0.687 sumlogdet=0.039 beta=6.000 acc_mask=0.625 loss=-144.611 plaqs=0.259 sinQ=0.001 intQ=0.022 dQint=0.011 dQsin=0.001

[2025-12-31 11:28:20,251972][I][ipykernel_27083/30352159:21:<module>] train_step=45 dt=2.210 energy=-2545.097 logprob=-2545.479 logdet=0.382 sldf=0.243 sldb=-0.258 sld=0.382 xeps=0.013 veps=0.011 acc=0.877 sumlogdet=-0.040 beta=6.000 acc_mask=0.875 loss=-229.297 plaqs=0.272 sinQ=0.002 intQ=0.030 dQint=0.019 dQsin=0.001

[2025-12-31 11:28:31,115861][I][ipykernel_27083/30352159:21:<module>] train_step=50 dt=2.359 energy=-2690.892 logprob=-2691.361 logdet=0.469 sldf=0.288 sldb=-0.284 sld=0.469 xeps=0.013 veps=0.012 acc=0.643 sumlogdet=0.021 beta=6.000 acc_mask=0.750 loss=-107.596 plaqs=0.292 sinQ=0.001 intQ=0.017 dQint=0.021 dQsin=0.001

[2025-12-31 11:28:43,176395][I][ipykernel_27083/30352159:21:<module>] train_step=55 dt=2.363 energy=-2816.397 logprob=-2816.817 logdet=0.420 sldf=0.254 sldb=-0.236 sld=0.420 xeps=0.013 veps=0.012 acc=0.914 sumlogdet=0.039 beta=6.000 acc_mask=0.875 loss=-226.396 plaqs=0.306 sinQ=0.001 intQ=0.011 dQint=0.015 dQsin=0.001

[2025-12-31 11:28:56,075223][I][ipykernel_27083/30352159:21:<module>] train_step=60 dt=2.309 energy=-2976.689 logprob=-2977.024 logdet=0.334 sldf=0.217 sldb=-0.234 sld=0.334 xeps=0.013 veps=0.013 acc=1.000 sumlogdet=-0.045 beta=6.000 acc_mask=1.000 loss=-418.937 plaqs=0.323 sinQ=-0.001 intQ=-0.012 dQint=0.013 dQsin=0.001

[2025-12-31 11:29:07,043464][I][ipykernel_27083/30352159:21:<module>] train_step=65 dt=1.863 energy=-3170.534 logprob=-3170.775 logdet=0.240 sldf=0.129 sldb=-0.087 sld=0.240 xeps=0.013 veps=0.013 acc=0.927 sumlogdet=0.139 beta=6.000 acc_mask=1.000 loss=-467.083 plaqs=0.342 sinQ=-0.001 intQ=-0.017 dQint=0.016 dQsin=0.001

[2025-12-31 11:29:20,040359][I][ipykernel_27083/30352159:21:<module>] train_step=70 dt=3.664 energy=-3373.085 logprob=-3373.044 logdet=-0.041 sldf=-0.031 sldb=0.043 sld=-0.041 xeps=0.013 veps=0.014 acc=1.000 sumlogdet=0.027 beta=6.000 acc_mask=1.000 loss=-442.733 plaqs=0.364 sinQ=0.000 intQ=0.000 dQint=0.015 dQsin=0.001

[2025-12-31 11:29:32,386191][I][ipykernel_27083/30352159:21:<module>] train_step=75 dt=2.340 energy=-3539.253 logprob=-3539.038 logdet=-0.215 sldf=-0.118 sldb=0.093 sld=-0.215 xeps=0.013 veps=0.014 acc=1.000 sumlogdet=-0.097 beta=6.000 acc_mask=1.000 loss=-422.878 plaqs=0.383 sinQ=-0.000 intQ=-0.003 dQint=0.010 dQsin=0.001

[2025-12-31 11:29:43,235888][I][ipykernel_27083/30352159:21:<module>] train_step=80 dt=2.135 energy=-3684.596 logprob=-3684.128 logdet=-0.468 sldf=-0.279 sldb=0.258 sld=-0.468 xeps=0.012 veps=0.015 acc=1.000 sumlogdet=-0.118 beta=6.000 acc_mask=1.000 loss=-586.133 plaqs=0.401 sinQ=-0.001 intQ=-0.009 dQint=0.016 dQsin=0.001

[2025-12-31 11:29:54,087841][I][ipykernel_27083/30352159:21:<module>] train_step=85 dt=2.002 energy=-3928.863 logprob=-3928.108 logdet=-0.755 sldf=-0.496 sldb=0.543 sld=-0.755 xeps=0.012 veps=0.015 acc=1.000 sumlogdet=0.147 beta=6.000 acc_mask=1.000 loss=-754.558 plaqs=0.421 sinQ=0.000 intQ=0.001 dQint=0.013 dQsin=0.001

[2025-12-31 11:30:04,526208][I][ipykernel_27083/30352159:21:<module>] train_step=90 dt=2.136 energy=-4076.174 logprob=-4075.097 logdet=-1.077 sldf=-0.694 sldb=0.746 sld=-1.077 xeps=0.012 veps=0.016 acc=1.000 sumlogdet=0.154 beta=6.000 acc_mask=1.000 loss=-690.198 plaqs=0.444 sinQ=0.000 intQ=0.005 dQint=0.019 dQsin=0.001

[2025-12-31 11:30:14,608929][I][ipykernel_27083/30352159:21:<module>] train_step=95 dt=2.042 energy=-4288.738 logprob=-4287.080 logdet=-1.658 sldf=-1.053 sldb=1.092 sld=-1.658 xeps=0.012 veps=0.016 acc=1.000 sumlogdet=0.110 beta=6.000 acc_mask=1.000 loss=-753.415 plaqs=0.465 sinQ=0.001 intQ=0.016 dQint=0.011 dQsin=0.001
dataset_train = history_train.get_dataset()
_ = history_train.plot_all(
    title='Train',
    num_chains=x.shape[0],
)
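
A quick numeric check is handy alongside the plots; assuming the returned xarray Dataset exposes the logged metrics as variables (as dataset_eval.plaqs below suggests):

print(f"final plaqs: {float(dataset_train.plaqs[-1].mean()):.4f}")  # ~0.465 by step 95 above
print(f"mean acc:    {float(dataset_train.acc.mean()):.3f}")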

Evaluation

# state = ptExpSU3.trainer.dynamics.random_state(6.0)
xeval, history_eval = evaluate(
    nsteps=50,
    exp=ptExpSU3,
    beta=6.0,
    # x=state.x,
    job_type='eval',
    nlog=1,
    nprint=50,
    grab=True,
)
[2025-12-31 11:30:28,599797][I][pytorch/experiment:117:evaluate] Running 50 steps of eval at beta=6.0000
[2025-12-31 11:30:28,601778][I][pytorch/experiment:121:evaluate] STEP: 0
[2025-12-31 11:30:29,267650][I][pytorch/experiment:121:evaluate] STEP: 1
[2025-12-31 11:30:30,014945][I][pytorch/experiment:121:evaluate] STEP: 2
...
[2025-12-31 11:30:57,527302][I][pytorch/experiment:121:evaluate] STEP: 48
[2025-12-31 11:30:58,064761][I][pytorch/experiment:121:evaluate] STEP: 49
dataset_eval = history_eval.get_dataset()
_ = history_eval.plot_all(title='Eval')

xeval = ptExpSU3.trainer.dynamics.unflatten(xeval)
logger.info(f"checkSU(x_eval): {g.checkSU(xeval)}")
logger.info(f"checkSU(x_eval): {g.checkSU(g.projectSU(xeval))}")
[2025-12-31 11:31:03,143620][I][ipykernel_27083/2193937887:2:<module>] checkSU(x_eval): (tensor[8] f64 x∈[1.361e-16, 1.471e-16] μ=1.426e-16 σ=3.279e-18 [1.423e-16, 1.408e-16, 1.361e-16, 1.453e-16, 1.441e-16, 1.419e-16, 1.471e-16, 1.427e-16], tensor[8] f64 x∈[6.307e-16, 7.649e-16] μ=7.129e-16 σ=4.504e-17 [7.452e-16, 7.239e-16, 7.131e-16, 6.307e-16, 7.176e-16, 7.649e-16, 7.451e-16, 6.627e-16])

[2025-12-31 11:31:03,159261][I][ipykernel_27083/2193937887:3:<module>] checkSU(x_eval): (tensor[8] f64 x∈[1.346e-16, 1.508e-16] μ=1.417e-16 σ=4.803e-18 [1.442e-16, 1.422e-16, 1.400e-16, 1.401e-16, 1.508e-16, 1.381e-16, 1.346e-16, 1.435e-16], tensor[8] f64 x∈[4.963e-16, 7.911e-16] μ=6.374e-16 σ=1.020e-16 [5.428e-16, 6.833e-16, 5.895e-16, 7.130e-16, 7.131e-16, 5.702e-16, 4.963e-16, 7.911e-16])
pdiff = dataset_eval.plaqs - dataset_hmc.plaqs
pdiff

fig, ax = plt.subplots(figsize=(12, 4))
(pdiff ** 2).plot(ax=ax)  # , robust=True)
ax.set_title(r"$\left|\delta U_{\mu\nu}\right|^{2}$ (Eval - HMC)")
outfile = Path(EVAL_DIR).joinpath('pdiff.svg')
fig.savefig(outfile.as_posix(), dpi=400, bbox_inches='tight')
plt.show()
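
To reduce the figure to a single headline number, one could also average the squared discrepancy over all chains and draws (a minimal sketch; relies only on standard xarray reductions):

rmse = np.sqrt((pdiff ** 2).mean())
print(f"RMS plaquette discrepancy (eval vs. hmc): {float(rmse):.4e}")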

Citation

BibTeX citation:
@online{foreman2024,
  author = {Foreman, Sam},
  title = {🔳 `L2hmc-Qcd` {Example:} {4D} {SU(3)}},
  date = {2024-07-24},
  url = {https://samforeman.me/posts/jupyter/l2hmc-4dSU3/},
  langid = {en}
}
For attribution, please cite this work as:
Foreman, Sam. 2024. “🔳 `L2hmc-Qcd` Example: 4D SU(3).” July 24, 2024. https://samforeman.me/posts/jupyter/l2hmc-4dSU3/.