🔳 l2hmc-qcd Example: 4D SU(3)

ai4science
lqcd
mcmc

Author
Sam Foreman

Published
December 6, 2023

Modified
May 2, 2025
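
This notebook trains an L2HMC sampler for 4D SU(3) lattice gauge theory on a small 4 × 4 × 4 × 4 lattice and compares it against generic HMC: a 50-step HMC baseline, then 100 training steps, then a 50-step evaluation of the trained sampler.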

Imports

# %matplotlib inline
import matplotlib_inline
matplotlib_inline.backend_inline.set_matplotlib_formats('svg')
import os
os.environ['COLORTERM'] = 'truecolor'
import lovely_tensors as lt
lt.monkey_patch()
lt.set_config(color=False)
# automatically detect and reload local changes to modules
%load_ext autoreload
%autoreload 2
import ezpz
import numpy as np
import matplotlib.pyplot as plt
from l2hmc.utils.plot_helpers import FigAxes
import ambivalent
plt.style.use(ambivalent.STYLES['ambivalent'])
#set_plot_style()
Output:

[2025-04-30 15:42:06,938] [INFO] [real_accelerator.py:222:get_accelerator] Setting ds_accelerator to mps (auto detect)

W0430 15:42:09.268000 24193 site-packages/torch/distributed/elastic/multiprocessing/redirects.py:29] NOTE: Redirects are currently not supported in Windows or MacOs.

Using device: cpu

Failed to download font: IBM Plex Sans, skipping!

Failed to download font: IBM Plex Sans Condensed, skipping!

Failed to download font: IBM Plex Serif, skipping!
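
Note: lt.monkey_patch() from lovely_tensors replaces the default tensor repr with a compact summary (shape, dtype, range, μ, σ, and the values for small tensors), which is why tensors print as tensor[8] f64 x∈[…] μ=… σ=… in the logs below.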

import ezpz
from pathlib import Path
from typing import Optional
from rich import print

import lovely_tensors as lt
import matplotlib.pyplot as plt
import numpy as np
import torch
import yaml

# from l2hmc.utils.dist import setup_torch
seed = np.random.randint(2 ** 32)
print(f"seed: {seed}")

_ = ezpz.setup_torch(seed=seed)
torch.set_default_dtype(torch.float64)

logger = ezpz.get_logger()

import l2hmc.group.su3.pytorch.group as g
from l2hmc.common import grab_tensor, print_dict
from l2hmc.configs import dict_to_list_of_overrides, get_experiment
from l2hmc.experiment.pytorch.experiment import Experiment, evaluate  # noqa
Output:
seed: 3107208906
[2025-04-30 15:42:15][I][ezpz/dist:557] Using get_torch_device_type()='mps' with backend='gloo'
[2025-04-30 15:42:15][I][ezpz/dist:873] Using device='mps' with backend='ddp' + 'gloo' for distributed training.
[2025-04-30 15:42:15][I][ezpz/dist:923] ['Sams-MacBook-Pro-2.local'][0/0]
from l2hmc.utils.plot_helpers import (  # noqa
    plot_scalar,
    plot_chains,
    plot_leapfrogs
)

def savefig(fig: plt.Figure, fname: str, outdir: os.PathLike):
    pngfile = Path(outdir).joinpath(f"pngs/{fname}.png")
    svgfile = Path(outdir).joinpath(f"svgs/{fname}.svg")
    pngfile.parent.mkdir(exist_ok=True, parents=True)
    svgfile.parent.mkdir(exist_ok=True, parents=True)
    fig.savefig(svgfile, transparent=True, bbox_inches='tight')
    fig.savefig(pngfile, transparent=True, bbox_inches='tight', dpi=300)

def plot_metrics(metrics: dict, title: Optional[str] = None, **kwargs):
    outdir = Path(f"./plots-4dSU3/{title}")
    outdir.mkdir(exist_ok=True, parents=True)
    for key, val in metrics.items():
        fig, ax = plot_metric(val, name=key, **kwargs)
        if title is not None:
            ax.set_title(title)
        logger.info(f"Saving {key} to {outdir}")
        savefig(fig, f"{key}", outdir=outdir)
        plt.show()

def plot_metric(
    metric: torch.Tensor,
    name: Optional[str] = None,
    **kwargs,
):
    assert len(metric) > 0
    if isinstance(metric[0], (int, float, bool, np.floating)):
        y = np.stack(metric)
        return plot_scalar(y, ylabel=name, **kwargs)
    element_shape = metric[0].shape
    if len(element_shape) == 2:
        y = grab_tensor(torch.stack(metric))
        return plot_leapfrogs(y, ylabel=name)
    if len(element_shape) == 1:
        y = grab_tensor(torch.stack(metric))
        return plot_chains(y, ylabel=name, **kwargs)
    if len(element_shape) == 0:
        y = grab_tensor(torch.stack(metric))
        return plot_scalar(y, ylabel=name, **kwargs)
    raise ValueError(f'Unexpected metric shape: {element_shape}')
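
plot_metric dispatches on the shape of each recorded element: 0-d tensors and Python scalars go to plot_scalar, 1-d (per-chain) tensors to plot_chains, and 2-d (per-leapfrog-step) tensors to plot_leapfrogs. A hypothetical usage sketch with made-up data (not from this run):

import torch
# 100 recorded steps of a per-chain metric (8 chains) -> 1-d elements,
# so plot_metric routes this to plot_chains and returns (fig, ax)
fake_energy = [torch.randn(8) for _ in range(100)]
fig, ax = plot_metric(fake_energy, name='energy')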

Load config + build Experiment

from rich import print

from l2hmc.configs import CONF_DIR
su3conf = Path(f"{CONF_DIR}/su3test.yaml")
with su3conf.open('r') as stream:
    conf = dict(yaml.safe_load(stream))
import json
from rich import print_json
print_json(json.dumps(conf, indent=4, sort_keys=True))
overrides = dict_to_list_of_overrides(conf)
Output:
{
  "annealing_schedule": {
    "beta_final": 6.0,
    "beta_init": 6.0
  },
  "backend": "DDP",
  "conv": "none",
  "dynamics": {
    "eps": 0.01,
    "eps_fixed": false,
    "group": "SU3",
    "latvolume": [
        4,
        4,
        4,
        4
    ],
    "merge_directions": true,
    "nchains": 8,
    "nleapfrog": 4,
    "use_separate_networks": false,
    "use_split_xnets": false,
    "verbose": true
  },
  "framework": "pytorch",
  "init_aim": false,
  "init_wandb": false,
  "learning_rate": {
    "clip_norm": 1.0,
    "lr_init": "1e-04"
  },
  "loss": {
    "aux_weight": 0.0,
    "charge_weight": 0.0,
    "plaq_weight": 0.1,
    "rmse_weight": 0.1,
    "use_mixed_loss": true
  },
  "net_weights": {
    "v": {
        "q": 1.0,
        "s": 1.0,
        "t": 1.0
    },
    "x": {
        "q": 1.0,
        "s": 0.0,
        "t": 1.0
    }
  },
  "network": {
    "activation_fn": "tanh",
    "dropout_prob": 0.0,
    "units": [
        256
    ],
    "use_batch_norm": false
  },
  "restore": false,
  "save": false,
  "steps": {
    "log": 1,
    "nepoch": 10,
    "nera": 1,
    "print": 1,
    "test": 50
  },
  "use_tb": false,
  "use_wandb": false
}
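dict_to_list_of_overrides flattens this nested config into Hydra-style "key=value" override strings that get_experiment can consume. A minimal sketch of the assumed behavior (flatten_overrides is a hypothetical stand-in, not the library function):

def flatten_overrides(d: dict, prefix: str = '') -> list[str]:
    # Recursively join nested keys with '.' and render leaves as key=value
    out = []
    for k, v in d.items():
        key = f'{prefix}.{k}' if prefix else str(k)
        if isinstance(v, dict):
            out.extend(flatten_overrides(v, key))
        else:
            out.append(f'{key}={v}')
    return out

flatten_overrides({'dynamics': {'group': 'SU3', 'nchains': 8}})
# -> ['dynamics.group=SU3', 'dynamics.nchains=8']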
ptExpSU3 = get_experiment(overrides=[*overrides], build_networks=True)

# console.print(ptExpSU3.config)
state = ptExpSU3.trainer.dynamics.random_state(6.0)
logger.info(f"checkSU(state.x): {g.checkSU(state.x)}")
logger.info(f"checkSU(state.x): {g.checkSU(g.projectSU(state.x))}")
assert isinstance(state.x, torch.Tensor)
assert isinstance(state.beta, torch.Tensor)
assert isinstance(ptExpSU3, Experiment)
Output:
[2025-04-30 15:42:54][I][utils/dist:229:l2hmc.utils.dist] Caught MASTER_PORT:1234 from environment!
[2025-04-30 15:42:54][I][utils/dist:229:l2hmc.utils.dist] Caught MASTER_PORT:1234 from environment!
[2025-04-30 15:42:54][W][pytorch/trainer:467:l2hmc.trainers.pytorch.trainer] Using torch.float32 on cpu!
[2025-04-30 15:42:54][W][pytorch/trainer:467:l2hmc.trainers.pytorch.trainer] Using `torch.optim.Adam` optimizer
[2025-04-30 15:42:54][I][pytorch/trainer:305:l2hmc.trainers.pytorch.trainer] num_params in model: 27880456
[2025-04-30 15:42:55][W][pytorch/trainer:271:l2hmc.trainers.pytorch.trainer] logging with freq 1 for wandb.watch
[2025-04-30 15:42:55][I][ipykernel_24193/1455121896:5:ezpz.log] checkSU(state.x): (tensor[8] f64 x∈[1.374e-14, 2.051e-13] μ=5.186e-14 σ=6.363e-14 [2.328e-14, 1.850e-14, 5.587e-14, 2.051e-13, 4.692e-14, 1.374e-14, 1.985e-14, 3.163e-14], tensor[8] f64 x∈[2.012e-13, 6.500e-12] μ=1.497e-12 σ=2.096e-12 [4.096e-13, 4.134e-13, 1.774e-12, 6.500e-12, 1.446e-12, 2.012e-13, 4.689e-13, 7.596e-13])
[2025-04-30 15:42:55][I][ipykernel_24193/1455121896:6:ezpz.log] checkSU(state.x): (tensor[8] f64 x∈[2.705e-16, 2.883e-16] μ=2.798e-16 σ=5.148e-18 [2.763e-16, 2.705e-16, 2.803e-16, 2.883e-16, 2.825e-16, 2.819e-16, 2.804e-16, 2.783e-16], tensor[8] f64 x∈[8.900e-16, 9.337e-16] μ=9.098e-16 σ=1.819e-17 [8.903e-16, 9.331e-16, 9.147e-16, 8.903e-16, 9.337e-16, 9.114e-16, 8.900e-16, 9.147e-16])
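
checkSU reports, per chain, how far the link matrices have drifted from SU(3), and projectSU projects them back onto the group (note the deviations shrink from ~1e-12 to ~1e-16 above). A minimal sketch of the kind of quantity involved, assuming the relevant deviations are from unitarity and unit determinant (su3_deviation is an illustration, not the l2hmc implementation):

import torch

def su3_deviation(u: torch.Tensor) -> tuple[torch.Tensor, torch.Tensor]:
    # ||U^H U - I||: deviation from unitarity; |det U - 1|: from det = 1
    eye = torch.eye(3, dtype=u.dtype, device=u.device)
    return (
        torch.linalg.matrix_norm(u.mH @ u - eye),
        (torch.linalg.det(u) - 1.0).abs(),
    )

# exp(iH) with H Hermitian and traceless is in SU(3) up to roundoff:
a = torch.randn(3, 3, dtype=torch.complex128)
h = 0.5 * (a + a.mH)                                   # Hermitian part
h = h - (torch.diagonal(h).sum() / 3.0) * torch.eye(3, dtype=h.dtype)
u = torch.matrix_exp(1j * h)
print(su3_deviation(u))  # both should be ~1e-16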
# from l2hmc.utils.plot_helpers import set_plot_style
# set_plot_style()

from l2hmc.common import get_timestamp
TSTAMP = get_timestamp()
OUTPUT_DIR = Path(f"./outputs/pt4dSU3/{TSTAMP}")
HMC_DIR = OUTPUT_DIR.joinpath('hmc')
EVAL_DIR = OUTPUT_DIR.joinpath('eval')
TRAIN_DIR = OUTPUT_DIR.joinpath('train')
HMC_DIR.mkdir(exist_ok=True, parents=True)
EVAL_DIR.mkdir(exist_ok=True, parents=True)
TRAIN_DIR.mkdir(exist_ok=True, parents=True)
ptExpSU3.trainer.print_grads_and_weights()
logger.info(ptExpSU3.config)
#console.print(ptExpSU3.config)
Output:
[2025-04-30 15:43:03][I][pytorch/trainer:2000:l2hmc.trainers.pytorch.trainer] --------------------------------------------------------------------------------
[2025-04-30 15:43:03][I][pytorch/trainer:2001:l2hmc.trainers.pytorch.trainer] GRADS:
[2025-04-30 15:43:03][I][l2hmc/common:97] networks.xnet.input_layer.xlayer.weight: None None
None
networks.xnet.input_layer.xlayer.bias: None None
None
networks.xnet.input_layer.vlayer.weight: None None
None
networks.xnet.input_layer.vlayer.bias: None None
None
networks.xnet.scale.coeff: None None
None
networks.xnet.scale.layer.weight: None None
None
networks.xnet.scale.layer.bias: None None
None
networks.xnet.transf.coeff: None None
None
networks.xnet.transf.layer.weight: None None
None
networks.xnet.transf.layer.bias: None None
None
networks.xnet.transl.weight: None None
None
networks.xnet.transl.bias: None None
None
networks.vnet.input_layer.xlayer.weight: None None
None
networks.vnet.input_layer.xlayer.bias: None None
None
networks.vnet.input_layer.vlayer.weight: None None
None
networks.vnet.input_layer.vlayer.bias: None None
None
networks.vnet.scale.coeff: None None
None
networks.vnet.scale.layer.weight: None None
None
networks.vnet.scale.layer.bias: None None
None
networks.vnet.transf.coeff: None None
None
networks.vnet.transf.layer.weight: None None
None
networks.vnet.transf.layer.bias: None None
None
networks.vnet.transl.weight: None None
None
networks.vnet.transl.bias: None None
None
xeps.0: None None
None
xeps.1: None None
None
xeps.2: None None
None
xeps.3: None None
None
veps.0: None None
None
veps.1: None None
None
veps.2: None None
None
veps.3: None None
None
[2025-04-30 15:43:03][I][pytorch/trainer:2003:l2hmc.trainers.pytorch.trainer] --------------------------------------------------------------------------------
[2025-04-30 15:43:03][I][pytorch/trainer:2004:l2hmc.trainers.pytorch.trainer] WEIGHTS:
[2025-04-30 15:43:03][I][l2hmc/common:97] networks.xnet.input_layer.xlayer.weight: torch.Size([256, 18432]) torch.float64
[[-4.19951343e-03 -4.26238340e-03  2.35647156e-03 ...  1.68137878e-03
  -1.50068399e-03  3.15863944e-03]
 [ 2.39342553e-04 -4.97551288e-03  7.26050381e-03 ... -5.64728922e-03
  -5.55020874e-03  1.17323965e-03]
 [ 5.26405398e-03 -3.91491501e-03  7.14260998e-04 ... -6.31723398e-03
   5.23567487e-03  2.05607864e-03]
 ...
 [ 6.68730979e-03 -6.91921820e-03  8.95772739e-05 ... -4.07246778e-03
  -2.29316978e-03 -3.43056826e-03]
 [-1.77016491e-03 -1.95781418e-03 -5.01585640e-03 ... -6.46914302e-03
   4.20147742e-03 -7.09234809e-03]
 [-1.94973833e-03 -7.26403514e-03  4.98051500e-04 ... -6.28795095e-03
   6.79912449e-04 -9.21666063e-04]]
networks.xnet.input_layer.xlayer.bias: torch.Size([256]) torch.float64
[ 3.15653089e-03 -6.53747057e-03 -6.14933637e-03 -5.49529500e-03
 -4.48003873e-03 -7.03491801e-03 -6.56321877e-03  4.67182499e-03
  4.36532234e-03 -5.36476346e-03 -6.09211776e-03 -7.10460550e-03
 -6.52009109e-03 -6.43876671e-03 -1.97297283e-03  5.79927976e-03
 -5.49722940e-03  6.75426645e-03  6.06732313e-03  2.56460577e-03
 -5.80235106e-03  1.03618414e-03  6.36349657e-04  1.60436063e-03
 -3.06399032e-05 -7.50359147e-04  4.36385955e-03  1.00302522e-03
  7.32044498e-03 -1.43824191e-03 -2.50594005e-03 -6.82552956e-03
 -4.71028329e-03  1.71534301e-03  6.10709162e-03 -4.05667409e-03
 -2.30713007e-03 -5.12144947e-03 -7.15109109e-04 -4.44328498e-03
 -4.94695187e-04  3.15081409e-03 -2.37185464e-03 -1.91157742e-03
  6.91548413e-03  3.81991132e-04 -4.31922342e-03 -1.98542428e-03
  4.05912039e-03  1.92396383e-03  3.53810871e-03  6.53665668e-04
 -5.06483635e-04 -5.86019994e-03 -2.86685524e-03 -2.27205811e-03
  7.25025555e-03 -5.94056580e-03 -1.54673733e-03  8.73715266e-04
 -4.36832096e-03  4.80786903e-03  1.69290306e-03 -1.38375220e-03
  6.96793511e-03  5.39967604e-03 -4.38277881e-03  4.86837191e-03
 -4.63730538e-03 -2.72580171e-04  5.81442710e-03  1.45265237e-03
  5.09906755e-03 -5.65087549e-03 -1.40143350e-03 -6.84733200e-03
  1.19933276e-03  7.28487144e-03 -1.88899102e-03 -1.74111447e-03
 -2.23510673e-03  2.73357402e-03 -1.06047067e-03 -5.84367407e-04
 -1.07953771e-03 -7.04448438e-03  3.29054140e-03  1.94660675e-04
 -1.10677934e-03 -2.41696262e-03 -1.78325949e-03  1.03577164e-03
  7.27804628e-03 -5.85394151e-03 -4.49392971e-03 -6.15160667e-03
 -4.20118105e-03  4.69744100e-03 -4.00506019e-03 -3.18274408e-03
  6.46112415e-03  2.89159411e-03  5.58561882e-05 -2.93133409e-03
  8.16798907e-04 -6.32176171e-03  5.56053897e-03 -6.36041217e-04
 -5.79233323e-03  5.06384306e-03 -3.84639758e-03  4.87318855e-03
  2.09912290e-04  4.33691731e-03  3.37906617e-03 -5.05196960e-03
  1.02895130e-03  4.08134740e-03 -1.53992986e-03  2.17678400e-03
 -6.35111539e-03 -5.66248347e-03 -1.10881411e-03 -4.16939110e-03
 -2.41698897e-03  7.25206709e-03  3.52455892e-03 -2.72657199e-03
 -4.46049487e-03  6.27997800e-03  4.12822311e-03 -4.29568963e-03
  3.27822525e-03  4.74422437e-04 -7.36276516e-03  6.08754623e-04
  5.07836316e-03 -2.49084412e-03  6.68436632e-03  1.63677606e-03
 -6.40259777e-03 -3.89026602e-03  5.53970429e-04 -4.87297662e-03
  5.29615772e-03 -7.04606173e-03  3.74678711e-03  3.76877809e-03
  4.02093817e-03 -4.28408453e-04 -5.69630100e-03  7.00086373e-03
 -3.02440694e-03  2.58179140e-03  5.33379816e-03 -6.61146570e-03
  1.48293246e-03  2.37842092e-03 -4.01829177e-03  5.58703024e-03
 -5.74216662e-04  5.99784977e-03 -4.73664072e-03  2.37927320e-03
 -3.39304510e-03 -2.54775472e-03  4.55417377e-03  8.61042071e-04
  5.07867546e-03  5.38944149e-03  8.43247180e-05 -3.12868764e-03
 -1.34245397e-03  6.48412301e-03  7.15439817e-03  1.66651130e-04
  4.06732860e-03  2.83814521e-03  7.24635495e-03  6.22788597e-03
  4.20149888e-03  6.49253302e-04 -3.91114083e-03 -3.58432497e-03
 -3.86471098e-03  2.81108473e-03  1.78824730e-03  1.68233903e-03
  2.84089912e-03  6.25876716e-03 -4.43284225e-03  1.18857102e-03
 -4.23199176e-03  6.52209025e-03  3.10487973e-04  7.15578276e-03
 -2.14404584e-03  5.07581213e-03 -5.91204650e-03  6.67434357e-03
 -3.67739919e-04  4.18005262e-04 -2.95130127e-04  4.67975206e-03
  1.59355487e-03  5.93234468e-03 -2.09718803e-03  2.76864020e-03
  1.19992438e-03  5.40846449e-03  1.29281665e-03  7.07159381e-03
  1.41238172e-03 -5.80262790e-03  3.72717315e-03 -4.90456269e-03
 -6.72488463e-03 -5.56823441e-03 -5.23051378e-03 -6.76384914e-03
 -3.99397980e-03  3.39150096e-03  2.88789147e-03 -5.20359136e-04
  2.38952795e-03  1.60403547e-03 -2.61388670e-03  2.66299234e-03
 -8.18143279e-04 -2.57845668e-03  4.40694062e-03  2.04237922e-03
  5.15184399e-03 -3.10021599e-03  6.17029479e-03  7.23334944e-03
  3.18039373e-03 -1.86054513e-03 -3.84445095e-03  3.61909527e-03
 -6.88008789e-03 -4.94012773e-03  3.70646320e-03  5.90539612e-03
 -6.96513871e-03  5.62814758e-03 -7.02268337e-03 -7.26117535e-03
  4.51210770e-03 -4.71571024e-03 -1.22832005e-04 -8.83315043e-04
 -9.59028740e-04  6.38134499e-03 -2.59574649e-03 -1.15310570e-03]
networks.xnet.input_layer.vlayer.weight: torch.Size([256, 18432]) torch.float64
[[ 0.00463266 -0.00236656 -0.00706409 ... -0.00116637 -0.0052418
  -0.00603106]
 [-0.0070495  -0.00672454 -0.00573184 ... -0.0050628   0.00602185
  -0.00735768]
 [ 0.00696305 -0.00561956 -0.00690782 ...  0.0012015   0.00509584
   0.00545731]
 ...
 [-0.00098264  0.00689178 -0.00444889 ... -0.0024313   0.0057689
   0.00241326]
 [-0.00321381 -0.00520019 -0.00651976 ... -0.00382101 -0.00310216
  -0.00372552]
 [-0.00202929  0.00629375  0.00024414 ... -0.00602879  0.0022714
  -0.00331077]]
networks.xnet.input_layer.vlayer.bias: torch.Size([256]) torch.float64
[ 7.13954968e-03  2.33497538e-03 -1.57597373e-03 -2.46922688e-03
  1.36409845e-03 -3.54782344e-03  6.88996067e-03  1.10860886e-03
  1.38812383e-03 -7.19016211e-03  1.45510027e-03  3.02318547e-03
 -7.32138405e-03 -8.43571075e-04  4.28112049e-03  2.40885560e-03
  7.18076241e-03  1.56188999e-03  7.62330334e-04  6.74154006e-04
  3.15126920e-03  5.91587112e-03 -4.05739295e-03 -5.78828923e-03
  1.94070617e-04 -4.62861933e-03 -3.08415149e-04  3.08347898e-04
 -1.03532907e-03  1.76321011e-03  1.00590336e-03  5.19000988e-03
 -6.80118548e-03 -3.83583844e-03  2.23868496e-03 -3.56978474e-03
  4.79782172e-03 -2.77326189e-03  7.04744644e-03  3.53315120e-03
 -3.15971697e-03 -3.91518415e-03 -1.13455609e-03 -9.63908149e-04
  2.64954937e-03  5.46426969e-03  4.37747118e-03  3.15638766e-03
 -1.92890249e-03  5.45232414e-03  3.35068143e-03  4.33478276e-04
 -4.31518024e-03 -6.72472428e-03  4.54341573e-03 -2.88636991e-03
  3.70153548e-03 -6.86245384e-03  4.75460380e-03  6.41603726e-03
 -6.26087807e-03  3.02317812e-03  5.73602283e-03 -4.28468460e-04
  6.01001762e-03 -1.69198965e-03  5.43279470e-03  5.62699022e-03
 -2.88454197e-03 -3.30683965e-03  4.35496712e-03  6.48480591e-03
 -5.20145666e-03 -3.04176444e-03 -4.21770568e-03 -3.28701122e-03
  5.91868810e-03  6.96211411e-03  4.04241174e-03 -4.78452628e-03
  6.74957958e-03  6.22083070e-03 -3.88811704e-04  1.82479643e-03
  5.20908605e-03  2.60155604e-03  6.63469193e-03  8.44931789e-04
 -8.29745518e-04  3.60376697e-03  9.97494182e-04  3.64541569e-03
 -6.90072487e-03  3.80146210e-03 -5.36299117e-03 -6.35306719e-03
  1.03268753e-03  3.54132643e-03 -4.01312104e-03  2.90626746e-03
  1.29579833e-03  5.81664084e-03  2.46191290e-03  5.72887267e-03
  5.45967242e-03  4.87193795e-03  6.61741538e-03 -3.24694377e-03
 -3.05439571e-03 -4.30007078e-03 -4.81643057e-03  5.23719897e-03
 -2.92609792e-03  3.11786964e-03  5.06377992e-03  6.93156574e-04
 -6.34295901e-03 -5.20360405e-03  6.53520633e-03 -2.19878290e-03
 -4.23761225e-03 -4.46902040e-03  3.49561972e-03 -3.77943695e-03
 -1.19474523e-03 -5.53529271e-03 -5.21859423e-03  5.27612485e-04
 -3.22711340e-03  1.97883571e-03 -1.37254251e-03 -4.60044753e-03
 -5.11436754e-03 -4.77549801e-03 -3.34637898e-03 -6.94803859e-03
  2.89079417e-03 -7.18483197e-03 -9.93689973e-05 -4.96657772e-03
  6.76707823e-03 -4.62419960e-03  5.78926771e-03 -5.18438170e-03
 -2.05662931e-03  3.27891351e-04 -8.57164219e-04 -4.31960935e-03
 -4.55563453e-03  7.15334687e-03  1.82591525e-03  7.99936920e-05
  2.45142075e-03  5.73190392e-03 -1.54055161e-03 -1.51643201e-03
 -7.22129499e-03 -5.17472970e-04 -8.08219555e-04 -6.38394953e-03
  5.06663481e-03 -5.11019352e-03  3.69304729e-03  1.81648701e-03
 -8.70826108e-04  5.90869986e-03  2.05946655e-03 -4.32638973e-03
 -2.48755354e-03  1.63676471e-03 -7.25196707e-03  4.79333334e-03
  4.19589915e-03  6.51303768e-04  2.76840533e-03 -2.50836736e-03
 -6.46075359e-03  1.25353015e-03  1.33234235e-03 -2.13799161e-04
 -2.87122657e-04 -4.04575035e-03 -5.93910528e-03  1.06039907e-03
  5.28653467e-04 -5.54096450e-03 -6.83464033e-03 -6.89233810e-03
 -3.02152888e-03  1.23447448e-03  2.23110257e-03 -5.61453445e-03
 -5.80583301e-03 -6.01375088e-04  5.93332855e-03  7.31844717e-03
 -4.38679230e-03 -2.48353842e-03 -4.45933690e-03 -4.31983253e-03
  6.04181330e-03 -2.29214770e-03 -2.65686023e-03  1.07060705e-03
  3.17073542e-04  6.97229815e-03 -2.62600050e-03  3.63202727e-03
  3.41508028e-03 -6.49521498e-03 -9.85550166e-04  4.92309670e-03
 -4.84045929e-03 -6.10941181e-03 -6.26433188e-03  1.96075852e-04
  1.70661732e-03  4.63002121e-03 -3.52289210e-03  2.43618431e-03
 -2.64042274e-03 -5.41311502e-03 -7.33775376e-03 -7.14481501e-04
 -6.46917530e-03 -5.40453482e-03  7.22469128e-03  2.06925380e-03
  1.81981272e-03 -9.21539612e-04  5.48254267e-03 -5.33146979e-03
 -3.48952775e-04  3.75635835e-03 -6.01750009e-03  3.11628044e-03
 -6.52624412e-03  3.47105111e-03 -1.11631997e-03 -3.85723350e-03
 -4.40994989e-03  1.17136573e-03 -3.24804979e-03 -2.00483200e-03
  5.87188180e-04  5.16416785e-03  2.25585266e-03  3.65669899e-04
  5.02527363e-04  1.09367410e-03 -2.92367173e-03  4.60333182e-03
  2.98082391e-03  6.09591745e-03 -2.61550086e-04 -3.48777157e-03]
networks.xnet.scale.coeff: torch.Size([1, 9216]) torch.float64
[[0. 0. 0. ... 0. 0. 0.]]
networks.xnet.scale.layer.weight: torch.Size([9216, 256]) torch.float64
[[-0.0314128   0.02457701 -0.01737903 ...  0.05324728  0.02920562
  -0.01480562]
 [ 0.04872326  0.04276607 -0.03822993 ...  0.04910911 -0.00361292
  -0.0327975 ]
 [-0.007368    0.04980131 -0.03215754 ... -0.00088707  0.04168787
   0.01848812]
 ...
 [-0.02830753 -0.05883997  0.00450329 ...  0.05939779  0.03053409
  -0.00791589]
 [-0.03962345 -0.01745788 -0.00161624 ...  0.03181425 -0.04709787
   0.04662044]
 [-0.05086516  0.01152248 -0.02808823 ...  0.05979598  0.0187143
   0.02237458]]
networks.xnet.scale.layer.bias: torch.Size([9216]) torch.float64
[ 0.05768892 -0.05574379 -0.02721741 ...  0.0429844  -0.0245669
  0.01514744]
networks.xnet.transf.coeff: torch.Size([1, 9216]) torch.float64
[[0. 0. 0. ... 0. 0. 0.]]
networks.xnet.transf.layer.weight: torch.Size([9216, 256]) torch.float64
[[-5.48741770e-02  5.88052983e-02 -4.66361299e-02 ...  4.88506894e-03
   4.86617921e-03 -4.32845085e-02]
 [ 1.52577676e-02 -3.61066089e-02 -2.71776527e-02 ... -3.64660092e-02
  -2.36572372e-02  3.22652308e-02]
 [-1.60658234e-02  1.18504295e-02  5.53077906e-02 ...  2.12042919e-03
  -3.51854507e-02 -2.75998519e-02]
 ...
 [ 5.39795249e-02  5.57953445e-02 -5.10586758e-02 ...  1.91429723e-02
   7.08082998e-05 -5.18016647e-02]
 [ 5.80865506e-03 -6.10658241e-02  2.19656541e-02 ...  3.99923136e-02
  -2.62750389e-02 -4.53906247e-02]
 [-4.22715684e-02  2.91019650e-02 -2.99350749e-02 ...  5.93449722e-03
  -2.18851812e-02 -3.72887999e-02]]
networks.xnet.transf.layer.bias: torch.Size([9216]) torch.float64
[ 0.0160626   0.03637902 -0.04792599 ... -0.02090929  0.03092064
 -0.02681802]
networks.xnet.transl.weight: torch.Size([9216, 256]) torch.float64
[[ 0.0597546   0.04724709  0.00974501 ...  0.03109464  0.05561356
  -0.06129477]
 [-0.00993569  0.01303274  0.03429567 ... -0.0537229   0.02269541
   0.0060408 ]
 [-0.03242319  0.05091612 -0.04957046 ...  0.02764103  0.0523916
   0.00046056]
 ...
 [-0.02372713  0.02379183  0.04659715 ... -0.02335574  0.00308037
   0.0329444 ]
 [ 0.04570751 -0.01026316 -0.02490364 ...  0.06235519 -0.0481135
   0.04846663]
 [-0.01367436  0.02093689  0.00793577 ... -0.03375852  0.03164459
  -0.0471489 ]]
networks.xnet.transl.bias: torch.Size([9216]) torch.float64
[-0.05460216 -0.00158335  0.00726456 ... -0.00648457 -0.03945205
  0.03027343]
networks.vnet.input_layer.xlayer.weight: torch.Size([256, 8192]) torch.float64
[[ 9.16479776e-03 -5.02985351e-03 -1.02646308e-02 ... -5.54809175e-03
   2.49656214e-03  5.17212957e-03]
 [-1.10012665e-02  1.43435235e-03  2.52587170e-03 ...  2.85077495e-03
   7.27665005e-03  1.25115602e-04]
 [-4.05602443e-03  5.24941517e-05  2.08143842e-03 ... -8.23178343e-03
  -1.25323771e-03  1.87069762e-03]
 ...
 [ 1.65006943e-03  5.20871048e-03 -1.00319636e-02 ... -8.65131086e-04
  -3.36906845e-03  9.64724941e-03]
 [-4.45201502e-03 -7.09414820e-03  1.06516265e-02 ... -1.70374089e-03
   5.39770888e-03  1.16063326e-03]
 [-2.58663111e-03 -7.89414569e-03 -3.60060986e-03 ...  2.35312618e-03
   3.82747698e-03  6.42668906e-03]]
networks.vnet.input_layer.xlayer.bias: torch.Size([256]) torch.float64
[ 7.19072073e-03  8.54899036e-03 -2.74573756e-04 -1.00589086e-02
 -1.01545971e-02 -4.92664753e-03  3.33148504e-03 -9.15564118e-03
 -2.42099720e-03  2.73065021e-03  9.08042100e-03  5.53534517e-03
 -7.83710822e-03 -7.49573676e-03  6.42186908e-03  8.03173418e-03
 -5.40192935e-03  6.03333723e-03  7.86975860e-03  9.10750021e-03
  7.69085484e-03  5.75742039e-03  5.05968312e-03 -1.08030831e-02
  3.19505866e-03  4.33518487e-03 -8.13443321e-03  3.26326675e-03
 -2.42816999e-04 -4.49394122e-03 -5.71015658e-03  1.04592739e-02
  8.85128136e-03  9.48734877e-03  9.57133407e-04 -4.04794034e-03
 -5.37902552e-04  6.59268074e-03 -8.24567402e-03 -9.85347305e-03
 -3.42285874e-03 -6.53992756e-03  1.22830194e-03  7.85893246e-03
 -5.83300876e-03  9.63272175e-03  1.00944380e-02 -8.23972365e-03
  6.38082055e-03 -8.30738685e-03 -1.28466574e-03 -5.92072715e-03
 -9.94081339e-04 -5.04786137e-03 -4.81081172e-03  7.90118625e-03
  1.71037484e-03  4.65126599e-03  3.37058664e-03 -4.20112172e-03
  3.59920358e-03  7.24500793e-03 -3.96064082e-03  7.38878616e-03
 -4.49233823e-03  1.05992911e-02  6.25592970e-04 -1.42090120e-03
  9.97637162e-03 -5.69488534e-03  8.76500682e-04 -1.01360952e-02
 -2.37918758e-03  9.16485953e-03 -5.39184741e-04 -9.55458902e-03
  2.15706793e-03  6.87956224e-03  9.80659942e-03  1.44044538e-03
 -1.47955633e-03 -6.09390842e-03  3.60479523e-03 -6.82449391e-03
  5.40907839e-03  1.01111670e-03 -6.58945581e-03  2.83824036e-04
  8.00948478e-03  9.06618547e-03  2.53404918e-03 -2.60766025e-03
  8.65071755e-03  4.25651017e-04  5.35529727e-03  1.99133934e-03
  2.17451416e-03  5.14984483e-03 -4.56392209e-03  2.18987254e-03
  2.15989875e-03  2.69606540e-04  3.48712402e-03  1.87305008e-04
 -3.78771696e-03 -1.00614225e-03  3.22000781e-03  1.19988308e-04
  2.42807845e-03  7.89254160e-03  9.15674794e-03  5.91687571e-03
  1.07615647e-02  1.00567930e-03 -7.01280272e-03  2.52361863e-03
 -2.47741325e-03 -1.00704911e-02 -7.94936428e-03  9.14483937e-03
  6.37746665e-03  7.03343740e-03  5.14637055e-03 -5.11663552e-03
 -8.00740179e-03  8.83483394e-03  6.53089076e-03  9.00223946e-03
  3.97538834e-03  6.24483879e-03  7.31143293e-03 -1.00182043e-02
 -9.54410363e-03 -5.58611888e-03 -2.97078564e-03 -4.44242630e-03
 -1.97398918e-03  2.85262394e-03 -2.06799186e-05 -6.70357733e-03
 -2.88790893e-03  2.64525844e-03 -4.77558126e-03  2.39335661e-03
 -2.24044925e-03 -4.06998727e-03  1.06852282e-02 -4.68211211e-03
 -2.39275674e-05  1.78237780e-03  7.35727064e-03  3.83212756e-03
  1.05362754e-02 -1.04227983e-02  5.49080533e-03 -4.21863781e-03
 -8.09427094e-03  4.07019125e-03 -2.45255795e-03 -2.53837092e-03
 -8.00514087e-03  8.73080505e-03 -8.95351911e-04  5.55943184e-03
  1.05709522e-02  1.08791111e-02 -1.27190779e-03 -6.14560145e-03
  3.24457839e-03  5.82486941e-03  8.54307159e-03  1.00030933e-02
 -1.72313442e-03 -8.25437185e-03 -1.09750088e-02  1.56909026e-03
  1.03776179e-02  6.30748559e-03 -6.85598815e-03 -1.12541891e-03
  9.08712741e-03  8.86892888e-03 -6.65631585e-03  4.50625609e-03
  6.52531325e-03 -7.26732740e-03  1.06536875e-03 -4.89295777e-03
  8.56154583e-03 -1.37062236e-03  7.88639999e-03  4.09090142e-03
  2.70268365e-03  2.09226824e-03 -8.10420964e-03 -5.58339073e-03
 -1.20819768e-03  7.11240267e-03  9.43157146e-03 -3.08990524e-03
  8.97867491e-03  3.34746366e-03  1.09295356e-02 -8.33854547e-03
  8.38929734e-04 -3.84791235e-03 -4.82723130e-03 -3.57815038e-03
 -9.57405736e-03 -1.26486527e-03  1.80339331e-04 -9.96310653e-03
 -5.69077756e-03  1.02989043e-02  7.48580783e-03 -7.39925761e-03
  1.63322755e-03  8.09922551e-03  2.04141394e-03 -2.41658959e-03
 -3.70372619e-04  9.03762479e-03 -4.80271241e-03 -7.08458501e-03
 -5.17033153e-03 -5.79945109e-03  1.24089517e-03 -3.33699629e-03
  3.73987379e-03  9.65713624e-03 -4.10453665e-03  4.65230530e-03
  9.41206321e-03  1.98615070e-03  1.89076444e-04 -1.09669487e-02
 -1.05897327e-02 -6.85746166e-03  9.04909639e-03  3.74210626e-03
  7.73146379e-03 -5.16318588e-03 -2.38919870e-03 -8.10861969e-03
 -2.64679669e-03 -2.54794893e-03 -5.72662559e-03  2.21213630e-03
 -3.11922885e-03 -8.05962367e-03 -5.62810358e-03 -4.50267728e-03
  6.65414399e-03  6.62929741e-03  4.13247675e-04  1.00713824e-02]
networks.vnet.input_layer.vlayer.weight: torch.Size([256, 8192]) torch.float64
[[-1.14911505e-04  1.02167470e-03  1.40021033e-03 ...  1.24707499e-03
   7.39021474e-03 -7.66842841e-05]
 [ 1.94857639e-03 -1.01153101e-02  5.93229540e-03 ... -7.92573245e-04
  -4.16384139e-03 -2.18078512e-03]
 [ 6.67728377e-04  2.17963982e-03  5.26611412e-03 ...  9.58798745e-03
   2.09696865e-03 -9.27834153e-03]
 ...
 [ 6.47180548e-03 -1.07005036e-02  3.93737006e-03 ... -5.18451888e-04
  -6.75962666e-03  3.20640579e-03]
 [ 4.78800955e-03  9.73209930e-03 -8.75317563e-03 ... -1.81879817e-03
   4.40209660e-03 -3.12276728e-03]
 [-8.81313864e-03 -3.99257134e-03  8.84710606e-03 ... -6.06736885e-03
  -7.77339244e-04  7.38524802e-03]]
networks.vnet.input_layer.vlayer.bias: torch.Size([256]) torch.float64
[-8.62724045e-03  1.42222980e-03 -6.96917869e-03 -5.58672282e-03
 -1.08436920e-02  1.92805181e-03  9.85077112e-03 -3.70476600e-03
 -1.09324801e-02 -3.16374460e-03  1.03695714e-02  5.06699318e-05
 -4.99672326e-04 -1.99306211e-03 -2.84156206e-03  4.66317161e-03
 -3.03587761e-03  7.51455137e-03 -2.33996069e-03  2.59240772e-05
  7.89245214e-03 -8.62472647e-04  1.73067098e-03  3.98049967e-03
  1.73062241e-03  6.62573647e-03  7.40045855e-03  2.67644125e-03
 -9.08800186e-03  5.89813445e-03  8.16554978e-03  6.63839573e-03
 -9.42590336e-03 -4.87546360e-04 -3.67365451e-03 -7.04534530e-03
  4.69672830e-03  2.60295575e-03 -5.76011135e-03 -6.28032865e-03
  1.91679316e-03  2.98078939e-03  3.62183797e-03 -3.00271988e-03
  1.63915131e-03  2.34721714e-03  6.65671087e-03 -2.31834993e-03
 -9.18060747e-03 -9.18534600e-03 -2.52645532e-03 -5.11630776e-03
 -1.44994545e-03 -1.03017183e-02 -9.78218895e-03 -1.01023624e-02
  1.18473846e-03  1.06675516e-02 -4.46746439e-03 -8.31554290e-03
  3.62939942e-03 -2.85054999e-03  3.50449313e-04  5.88476446e-03
 -7.41086950e-04 -1.88300384e-03 -9.95598461e-03 -3.86232625e-03
  8.51256252e-03 -4.71688463e-03  5.85945792e-03 -4.08366811e-03
  3.74353387e-03 -7.63856755e-03  6.01017018e-03 -2.08397059e-04
 -5.98487160e-03 -2.90322952e-03 -1.70036238e-03 -9.92108061e-03
 -2.22982220e-03  4.83112948e-03 -2.99896346e-03  3.67172178e-03
 -6.98774964e-04 -8.77047812e-03  1.62358117e-03 -1.78590660e-03
  5.38471356e-03 -3.43516886e-03  8.14706400e-03 -5.83946133e-03
 -3.43393746e-03  7.73027246e-03  6.63517574e-04 -1.97393104e-03
 -6.85942244e-03  9.00629517e-03  7.97936788e-03 -9.46204611e-03
 -9.91962740e-04 -1.10322243e-02  2.51705047e-03  9.34333540e-03
 -2.04383936e-03 -3.20343216e-03  9.29710695e-03 -3.91017821e-03
  6.39077050e-03  6.43472117e-03  9.74522830e-03 -6.93840319e-03
 -2.08547302e-03  1.38644474e-03  9.37315176e-03  4.56605920e-03
 -7.89351592e-03  9.38851564e-03  5.63796455e-03 -9.00095278e-03
  5.18531879e-03  5.30712437e-03 -8.30732223e-03 -7.40450098e-03
 -8.20939741e-03 -2.84902854e-03 -4.78296110e-03  4.23491318e-03
 -6.74215327e-04  7.31200359e-03  5.35229365e-03  5.08346832e-03
  4.55305812e-04 -9.43407806e-03 -3.95355860e-03  4.08208203e-03
  9.17557942e-03 -5.82235799e-03 -2.40188538e-03  5.86607196e-03
  9.26253560e-04  3.60195632e-04  5.37108378e-03  9.55347231e-03
  2.21315472e-03  2.83197797e-03  2.42350609e-03  4.04436381e-03
  5.66261568e-03  9.10928763e-03 -9.95752380e-03  9.69473082e-03
 -7.39153944e-04  3.66869476e-03 -1.01362383e-02  9.56476828e-03
 -2.82710541e-04  7.12206993e-03 -5.87929074e-04 -7.01050166e-03
 -4.23527215e-03 -8.87308949e-03 -1.39642809e-04  9.37101367e-03
  9.67923506e-05  1.99839363e-03 -8.40962420e-03  8.90230844e-03
 -1.01463100e-02 -1.92307743e-03  4.56673532e-03 -6.56275075e-03
  9.59734817e-04  1.02384073e-02 -5.02270952e-03  8.87749118e-03
 -1.08131623e-02 -4.46696784e-03  5.71411620e-04 -8.43994833e-03
  4.43985783e-03  1.97354673e-03 -1.01913055e-02 -4.17295006e-03
 -4.20001283e-03 -4.55569830e-03 -4.80213356e-03  4.86272493e-03
  4.45352684e-03 -2.98058407e-04 -2.77590550e-04  7.66098638e-03
  5.67088367e-03 -1.01337152e-02 -8.72584257e-03 -6.31807112e-03
  1.36405857e-03  2.23078281e-03 -7.77014046e-03 -5.62621277e-03
  1.21089554e-03 -8.25528093e-03 -4.31437392e-03  6.83876641e-03
 -2.20724004e-03 -6.96057323e-03  1.10153134e-02  9.20152266e-03
 -5.46451809e-04  3.95414653e-03 -6.12336496e-03 -1.01812970e-02
  2.58442604e-03  3.99579709e-03  2.15366084e-03  3.09138548e-03
 -1.98773575e-03  9.45093202e-03  4.53305893e-03 -4.35642434e-03
 -9.85531045e-04 -2.68568653e-03 -1.70380022e-03 -5.19579726e-03
 -1.04297413e-02  5.08077444e-03  8.74058023e-03  6.00930085e-03
  1.54289208e-03  6.84188667e-03 -9.08237877e-03  4.24600744e-03
  9.33722584e-06 -3.77439462e-03  8.54248635e-03 -7.56578027e-03
 -6.76504009e-03 -5.90847587e-03  4.09332418e-03  3.55476762e-03
  5.98836902e-04 -7.42746846e-03  9.41937684e-03  5.74515692e-03
  9.33460074e-03  1.01823407e-02  4.62239994e-03  4.47014706e-04
 -1.10312440e-02 -1.09556414e-02  1.00085173e-02  7.71189410e-03
 -8.37871661e-03  1.06354341e-03  1.81342397e-03  1.59670919e-03]
networks.vnet.scale.coeff: torch.Size([1, 9216]) torch.float64
[[0. 0. 0. ... 0. 0. 0.]]
networks.vnet.scale.layer.weight: torch.Size([9216, 256]) torch.float64
[[-0.00228215 -0.03766236  0.00310601 ...  0.03574621 -0.03147162
   0.05768579]
 [ 0.04740894  0.05251629  0.05302966 ...  0.03484848 -0.02955205
   0.06163016]
 [ 0.01222568  0.00514609 -0.0334526  ...  0.01297469 -0.02803657
   0.06087524]
 ...
 [ 0.05592883 -0.0102075  -0.05230017 ... -0.00338015 -0.00971749
  -0.02981181]
 [-0.04028498  0.04087614  0.04200986 ... -0.02805969  0.03686787
   0.04125376]
 [ 0.00587553  0.01966032 -0.00264878 ...  0.04938714  0.054406
  -0.00516837]]
networks.vnet.scale.layer.bias: torch.Size([9216]) torch.float64
[ 0.00492612  0.05642332  0.01745884 ...  0.05758128  0.03186792
 -0.00131235]
networks.vnet.transf.coeff: torch.Size([1, 9216]) torch.float64
[[0. 0. 0. ... 0. 0. 0.]]
networks.vnet.transf.layer.weight: torch.Size([9216, 256]) torch.float64
[[-0.04669794 -0.02162502 -0.05433034 ...  0.05776646 -0.04041847
  -0.03207429]
 [ 0.01005234 -0.04020476 -0.04877582 ...  0.05317922  0.01688223
   0.00213256]
 [-0.05261319  0.0362059  -0.04666302 ... -0.02129076 -0.03451092
  -0.01450992]
 ...
 [ 0.00717067  0.04310912  0.04005595 ... -0.01275533 -0.0141606
  -0.04939128]
 [-0.02057675 -0.04490391 -0.0232821  ...  0.02815117 -0.03451967
  -0.03208819]
 [ 0.04748397 -0.05835794 -0.04562179 ...  0.02394428  0.00509771
   0.01316225]]
networks.vnet.transf.layer.bias: torch.Size([9216]) torch.float64
[ 0.00330597  0.02071309  0.00124246 ...  0.03318861 -0.03303616
 -0.04279837]
networks.vnet.transl.weight: torch.Size([9216, 256]) torch.float64
[[ 0.00344245 -0.04238582 -0.04490574 ... -0.02783231 -0.04849146
   0.01404108]
 [ 0.02450462  0.04666956  0.03030368 ...  0.04106765 -0.00071757
   0.00888672]
 [-0.00462819 -0.04935891 -0.008225   ...  0.00929701 -0.01122217
   0.01513675]
 ...
 [ 0.05063408  0.00755534 -0.00132726 ... -0.00643975  0.02790855
   0.01580877]
 [ 0.01441602 -0.00605052  0.05652107 ...  0.01595805 -0.03783348
   0.0410961 ]
 [ 0.00295196 -0.03321374  0.05530547 ...  0.02630581  0.04170922
  -0.04991051]]
networks.vnet.transl.bias: torch.Size([9216]) torch.float64
[ 0.04973598 -0.01250011 -0.00308125 ...  0.04882267  0.00540407
 -0.03680365]
xeps.0: torch.Size([]) torch.float64
0.01
xeps.1: torch.Size([]) torch.float64
0.01
xeps.2: torch.Size([]) torch.float64
0.01
xeps.3: torch.Size([]) torch.float64
0.01
veps.0: torch.Size([]) torch.float64
0.01
veps.1: torch.Size([]) torch.float64
0.01
veps.2: torch.Size([]) torch.float64
0.01
veps.3: torch.Size([]) torch.float64
0.01
[2025-04-30 15:43:03][I][pytorch/trainer:2006:l2hmc.trainers.pytorch.trainer] --------------------------------------------------------------------------------
[2025-04-30 15:43:03][I][ipykernel_24193/3178487732:2:ezpz.log] ExperimentConfig(wandb={'setup': {'id': None, 'group': None, 'config': None, 'save_code': True, 'sync_tensorboard': True, 'mode': 'online', 'resume': 'allow', 'entity': 'l2hmc-qcd', 'project': 'l2hmc-qcd', 'settings': {'start_method': 'thread'}, 'tags': ['beta_init=6.0', 'beta_final=6.0']}}, steps=Steps(nera=1, nepoch=10, test=50, log=1, print=1, extend_last_era=1), framework='pytorch', loss=LossConfig(use_mixed_loss=True, charge_weight=0.0, rmse_weight=0.1, plaq_weight=0.1, aux_weight=0.0), network=NetworkConfig(units=[256], activation_fn='tanh', dropout_prob=0.0, use_batch_norm=False), conv=ConvolutionConfig(filters=[], sizes=[], pool=[]), net_weights=NetWeights(x=NetWeight(s=0.0, t=1.0, q=1.0), v=NetWeight(s=1.0, t=1.0, q=1.0)), dynamics=DynamicsConfig(nchains=8, group='SU3', latvolume=[4, 4, 4, 4], nleapfrog=4, eps=0.01, eps_hmc=0.25, use_ncp=True, verbose=True, eps_fixed=False, use_split_xnets=False, use_separate_networks=False, merge_directions=True), learning_rate=LearningRateConfig(lr_init=0.0001, mode='auto', monitor='loss', patience=5, cooldown=0, warmup=1000, verbose=True, min_lr=1e-06, factor=0.98, min_delta=0.0001, clip_norm=1.0), annealing_schedule=AnnealingSchedule(beta_init=6.0, beta_final=6.0, dynamic=False), gradient_accumulation_steps=1, restore=False, save=False, c1=0.0, port='2345', compile=True, profile=False, init_aim=False, init_wandb=False, use_wandb=False, use_tb=False, debug_mode=False, default_mode=True, print_config=True, precision='float32', ignore_warnings=True, backend='DDP', seed=9992, ds_config_path='/Users/samforeman/projects/saforem2/l2hmc-qcd/src/l2hmc/conf/ds_config.yaml', name=None, width=200, nchains=None, compression=False)

HMC
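
As a baseline, run 50 trajectories of generic HMC (no learned networks) at β = 6.0 with fixed step size eps=0.1 and nleapfrog=8 leapfrog steps per trajectory. Schematically, each trajectory alternates momentum kicks and position drifts (a flat-space sketch for illustration only; for SU(3) links the position update is a group exponential acting on the link matrices, not a plain addition):

# Schematic leapfrog integrator -- illustration only; the actual update
# lives in ptExpSU3.trainer.dynamics. grad_S(x) is the gradient of the action.
def leapfrog(x, v, grad_S, eps, nleapfrog):
    v = v - 0.5 * eps * grad_S(x)      # initial half-step momentum kick
    for _ in range(nleapfrog - 1):
        x = x + eps * v                # full-step position drift
        v = v - eps * grad_S(x)        # full-step momentum kick
    x = x + eps * v
    v = v - 0.5 * eps * grad_S(x)      # final half-step momentum kick
    return x, v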

xhmc, history_hmc = evaluate(
    nsteps=50,
    exp=ptExpSU3,
    beta=6.0,
    x=state.x,
    eps=0.1,
    nleapfrog=8,
    job_type='hmc',
    nlog=1,
    nprint=50,
    grab=True
)
Output:
[2025-04-30 15:43:31][I][pytorch/experiment:117:l2hmc.experiment.pytorch.experiment] Running 50 steps of hmc at beta=6.0000
[2025-04-30 15:43:31][I][pytorch/experiment:121:l2hmc.experiment.pytorch.experiment] STEP: 0
[2025-04-30 15:43:31][I][pytorch/experiment:121:l2hmc.experiment.pytorch.experiment] STEP: 1
[2025-04-30 15:43:31][I][pytorch/experiment:121:l2hmc.experiment.pytorch.experiment] STEP: 2
[2025-04-30 15:43:32][I][pytorch/experiment:121:l2hmc.experiment.pytorch.experiment] STEP: 3
[2025-04-30 15:43:32][I][pytorch/experiment:121:l2hmc.experiment.pytorch.experiment] STEP: 4
[2025-04-30 15:43:32][I][pytorch/experiment:121:l2hmc.experiment.pytorch.experiment] STEP: 5
[2025-04-30 15:43:32][I][pytorch/experiment:121:l2hmc.experiment.pytorch.experiment] STEP: 6
[2025-04-30 15:43:33][I][pytorch/experiment:121:l2hmc.experiment.pytorch.experiment] STEP: 7
[2025-04-30 15:43:33][I][pytorch/experiment:121:l2hmc.experiment.pytorch.experiment] STEP: 8
[2025-04-30 15:43:33][I][pytorch/experiment:121:l2hmc.experiment.pytorch.experiment] STEP: 9
[2025-04-30 15:43:33][I][pytorch/experiment:121:l2hmc.experiment.pytorch.experiment] STEP: 10
[2025-04-30 15:43:34][I][pytorch/experiment:121:l2hmc.experiment.pytorch.experiment] STEP: 11
[2025-04-30 15:43:34][I][pytorch/experiment:121:l2hmc.experiment.pytorch.experiment] STEP: 12
[2025-04-30 15:43:34][I][pytorch/experiment:121:l2hmc.experiment.pytorch.experiment] STEP: 13
[2025-04-30 15:43:34][I][pytorch/experiment:121:l2hmc.experiment.pytorch.experiment] STEP: 14
[2025-04-30 15:43:34][I][pytorch/experiment:121:l2hmc.experiment.pytorch.experiment] STEP: 15
[2025-04-30 15:43:35][I][pytorch/experiment:121:l2hmc.experiment.pytorch.experiment] STEP: 16
[2025-04-30 15:43:35][I][pytorch/experiment:121:l2hmc.experiment.pytorch.experiment] STEP: 17
[2025-04-30 15:43:35][I][pytorch/experiment:121:l2hmc.experiment.pytorch.experiment] STEP: 18
[2025-04-30 15:43:35][I][pytorch/experiment:121:l2hmc.experiment.pytorch.experiment] STEP: 19
[2025-04-30 15:43:36][I][pytorch/experiment:121:l2hmc.experiment.pytorch.experiment] STEP: 20
[2025-04-30 15:43:36][I][pytorch/experiment:121:l2hmc.experiment.pytorch.experiment] STEP: 21
[2025-04-30 15:43:36][I][pytorch/experiment:121:l2hmc.experiment.pytorch.experiment] STEP: 22
[2025-04-30 15:43:36][I][pytorch/experiment:121:l2hmc.experiment.pytorch.experiment] STEP: 23
[2025-04-30 15:43:37][I][pytorch/experiment:121:l2hmc.experiment.pytorch.experiment] STEP: 24
[2025-04-30 15:43:37][I][pytorch/experiment:121:l2hmc.experiment.pytorch.experiment] STEP: 25
[2025-04-30 15:43:37][I][pytorch/experiment:121:l2hmc.experiment.pytorch.experiment] STEP: 26
[2025-04-30 15:43:37][I][pytorch/experiment:121:l2hmc.experiment.pytorch.experiment] STEP: 27
[2025-04-30 15:43:38][I][pytorch/experiment:121:l2hmc.experiment.pytorch.experiment] STEP: 28
[2025-04-30 15:43:38][I][pytorch/experiment:121:l2hmc.experiment.pytorch.experiment] STEP: 29
[2025-04-30 15:43:38][I][pytorch/experiment:121:l2hmc.experiment.pytorch.experiment] STEP: 30
[2025-04-30 15:43:38][I][pytorch/experiment:121:l2hmc.experiment.pytorch.experiment] STEP: 31
[2025-04-30 15:43:39][I][pytorch/experiment:121:l2hmc.experiment.pytorch.experiment] STEP: 32
[2025-04-30 15:43:39][I][pytorch/experiment:121:l2hmc.experiment.pytorch.experiment] STEP: 33
[2025-04-30 15:43:39][I][pytorch/experiment:121:l2hmc.experiment.pytorch.experiment] STEP: 34
[2025-04-30 15:43:39][I][pytorch/experiment:121:l2hmc.experiment.pytorch.experiment] STEP: 35
[2025-04-30 15:43:40][I][pytorch/experiment:121:l2hmc.experiment.pytorch.experiment] STEP: 36
[2025-04-30 15:43:40][I][pytorch/experiment:121:l2hmc.experiment.pytorch.experiment] STEP: 37
[2025-04-30 15:43:40][I][pytorch/experiment:121:l2hmc.experiment.pytorch.experiment] STEP: 38
[2025-04-30 15:43:40][I][pytorch/experiment:121:l2hmc.experiment.pytorch.experiment] STEP: 39
[2025-04-30 15:43:41][I][pytorch/experiment:121:l2hmc.experiment.pytorch.experiment] STEP: 40
[2025-04-30 15:43:41][I][pytorch/experiment:121:l2hmc.experiment.pytorch.experiment] STEP: 41
[2025-04-30 15:43:41][I][pytorch/experiment:121:l2hmc.experiment.pytorch.experiment] STEP: 42
[2025-04-30 15:43:41][I][pytorch/experiment:121:l2hmc.experiment.pytorch.experiment] STEP: 43
[2025-04-30 15:43:42][I][pytorch/experiment:121:l2hmc.experiment.pytorch.experiment] STEP: 44
[2025-04-30 15:43:42][I][pytorch/experiment:121:l2hmc.experiment.pytorch.experiment] STEP: 45
[2025-04-30 15:43:42][I][pytorch/experiment:121:l2hmc.experiment.pytorch.experiment] STEP: 46
[2025-04-30 15:43:42][I][pytorch/experiment:121:l2hmc.experiment.pytorch.experiment] STEP: 47
[2025-04-30 15:43:43][I][pytorch/experiment:121:l2hmc.experiment.pytorch.experiment] STEP: 48
[2025-04-30 15:43:43][I][pytorch/experiment:121:l2hmc.experiment.pytorch.experiment] STEP: 49
dataset_hmc = history_hmc.get_dataset()
_ = history_hmc.plot_all(title='HMC')
Output:

[30 SVG figures: per-metric histories from history_hmc.plot_all(title='HMC')]
xhmc = ptExpSU3.trainer.dynamics.unflatten(xhmc)
print(f"checkSU(x_eval): {g.checkSU(xhmc)}")
print(f"checkSU(x_eval): {g.checkSU(g.projectSU(xhmc))}")
Output:
checkSU(x_eval): (tensor[8] f64 x∈[2.314e-16, 5.581e-16] μ=3.985e-16 σ=1.437e-16 [5.220e-16, 5.581e-16, 2.338e-16, 5.152e-16, 5.239e-16, 2.825e-16, 2.314e-16, 3.214e-16], tensor[8] f64 x∈[6.633e-16, 1.660e-15] μ=1.198e-15 σ=3.983e-16 [1.438e-15, 1.657e-15, 6.633e-16, 1.660e-15, 1.449e-15, 8.822e-16, 8.161e-16, 1.014e-15])
checkSU(x_eval): (tensor[8] f64 x∈[2.099e-16, 3.202e-16] μ=2.785e-16 σ=4.123e-17 [3.078e-16, 3.202e-16, 2.099e-16, 3.114e-16, 3.049e-16, 2.593e-16, 2.294e-16, 2.850e-16], tensor[8] f64 x∈[7.639e-16, 9.703e-16] μ=8.714e-16 σ=8.022e-17 [9.119e-16, 9.703e-16, 7.805e-16, 9.469e-16, 9.134e-16, 7.639e-16, 7.940e-16, 8.903e-16])

Training
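
Train the L2HMC sampler for 100 steps at fixed β = 6.0, starting from the state prepared above and logging running averages every 5 steps.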

import time
from l2hmc.utils.history import BaseHistory, summarize_dict

history_train = BaseHistory()
x = state.x
for step in range(100):
    tic = time.perf_counter()
    x, metrics_ = ptExpSU3.trainer.train_step(
        (x, state.beta)
    )
    toc = time.perf_counter()
    metrics = {
        'train_step': step,
        'dt': toc - tic,
        **metrics_,
    }
    if step % 5 == 0:
        avgs = history_train.update(metrics)
        summary = summarize_dict(avgs)
        logger.info(summary)
Output:
[2025-04-30 15:45:44][I][ipykernel_24193/30352159:21:ezpz.log] train_step=0 dt=1.577 energy=33.403 logprob=33.488 logdet=-0.085 sldf=-0.065 sldb=0.086 sld=-0.085 xeps=0.010 veps=0.010 acc=0.131 sumlogdet=0.000 beta=6.000 acc_mask=0.000 loss=88.118 plaqs=-0.004 sinQ=-0.002 intQ=-0.026 dQint=0.000 dQsin=0.000
[2025-04-30 15:45:51][I][ipykernel_24193/30352159:21:ezpz.log] train_step=5 dt=1.256 energy=-372.669 logprob=-372.955 logdet=0.285 sldf=0.191 sldb=-0.217 sld=0.285 xeps=0.010 veps=0.010 acc=1.000 sumlogdet=-0.091 beta=6.000 acc_mask=1.000 loss=-647.866 plaqs=0.038 sinQ=-0.002 intQ=-0.034 dQint=0.009 dQsin=0.001
[2025-04-30 15:45:58][I][ipykernel_24193/30352159:21:ezpz.log] train_step=10 dt=1.308 energy=-780.851 logprob=-781.247 logdet=0.396 sldf=0.254 sldb=-0.276 sld=0.396 xeps=0.011 veps=0.010 acc=0.952 sumlogdet=-0.044 beta=6.000 acc_mask=1.000 loss=-538.541 plaqs=0.088 sinQ=-0.002 intQ=-0.036 dQint=0.017 dQsin=0.001
[2025-04-30 15:46:05][I][ipykernel_24193/30352159:21:ezpz.log] train_step=15 dt=1.487 energy=-1234.642 logprob=-1235.128 logdet=0.486 sldf=0.303 sldb=-0.298 sld=0.486 xeps=0.011 veps=0.010 acc=1.000 sumlogdet=0.011 beta=6.000 acc_mask=1.000 loss=-617.539 plaqs=0.134 sinQ=-0.003 intQ=-0.038 dQint=0.022 dQsin=0.002
[2025-04-30 15:46:13][I][ipykernel_24193/30352159:21:ezpz.log] train_step=20 dt=1.424 energy=-1559.334 logprob=-1559.934 logdet=0.600 sldf=0.370 sldb=-0.363 sld=0.600 xeps=0.012 veps=0.010 acc=1.000 sumlogdet=0.029 beta=6.000 acc_mask=1.000 loss=-450.278 plaqs=0.169 sinQ=0.000 intQ=0.005 dQint=0.016 dQsin=0.001
[2025-04-30 15:46:20][I][ipykernel_24193/30352159:21:ezpz.log] train_step=25 dt=1.611 energy=-1901.485 logprob=-1902.100 logdet=0.615 sldf=0.383 sldb=-0.382 sld=0.615 xeps=0.012 veps=0.010 acc=0.766 sumlogdet=0.082 beta=6.000 acc_mask=0.625 loss=-306.696 plaqs=0.203 sinQ=-0.001 intQ=-0.018 dQint=0.013 dQsin=0.001
[2025-04-30 15:46:28][I][ipykernel_24193/30352159:21:ezpz.log] train_step=30 dt=1.358 energy=-2024.281 logprob=-2025.102 logdet=0.821 sldf=0.522 sldb=-0.530 sld=0.821 xeps=0.013 veps=0.010 acc=0.751 sumlogdet=-0.083 beta=6.000 acc_mask=0.750 loss=-136.896 plaqs=0.224 sinQ=0.001 intQ=0.016 dQint=0.012 dQsin=0.001
[2025-04-30 15:46:36][I][ipykernel_24193/30352159:21:ezpz.log] train_step=35 dt=1.532 energy=-2218.666 logprob=-2219.663 logdet=0.997 sldf=0.624 sldb=-0.624 sld=0.997 xeps=0.013 veps=0.011 acc=0.673 sumlogdet=0.001 beta=6.000 acc_mask=0.625 loss=-26.350 plaqs=0.242 sinQ=0.002 intQ=0.033 dQint=0.012 dQsin=0.001
[2025-04-30 15:46:44][I][ipykernel_24193/30352159:21:ezpz.log] train_step=40 dt=1.553 energy=-2388.089 logprob=-2389.183 logdet=1.093 sldf=0.706 sldb=-0.754 sld=1.093 xeps=0.013 veps=0.011 acc=0.386 sumlogdet=-0.040 beta=6.000 acc_mask=0.375 loss=239.300 plaqs=0.259 sinQ=0.002 intQ=0.029 dQint=0.012 dQsin=0.001
[2025-04-30 15:46:52][I][ipykernel_24193/30352159:21:ezpz.log] train_step=45 dt=1.609 energy=-2497.524 logprob=-2498.931 logdet=1.408 sldf=0.886 sldb=-0.906 sld=1.408 xeps=0.013 veps=0.011 acc=0.414 sumlogdet=0.014 beta=6.000 acc_mask=0.375 loss=76.319 plaqs=0.274 sinQ=0.002 intQ=0.026 dQint=0.003 dQsin=0.000
[2025-04-30 15:46:59][I][ipykernel_24193/30352159:21:ezpz.log] train_step=50 dt=1.623 energy=-2696.907 logprob=-2698.606 logdet=1.699 sldf=1.064 sldb=-1.083 sld=1.699 xeps=0.013 veps=0.012 acc=0.665 sumlogdet=0.006 beta=6.000 acc_mask=0.750 loss=-231.184 plaqs=0.293 sinQ=0.002 intQ=0.036 dQint=0.013 dQsin=0.001
[2025-04-30 15:47:07][I][ipykernel_24193/30352159:21:ezpz.log] train_step=55 dt=1.554 energy=-2865.825 logprob=-2867.770 logdet=1.945 sldf=1.209 sldb=-1.215 sld=1.945 xeps=0.013 veps=0.012 acc=0.770 sumlogdet=0.153 beta=6.000 acc_mask=0.750 loss=-210.427 plaqs=0.311 sinQ=0.003 intQ=0.041 dQint=0.016 dQsin=0.001
[2025-04-30 15:47:15][I][ipykernel_24193/30352159:21:ezpz.log] train_step=60 dt=1.709 energy=-2985.928 logprob=-2987.915 logdet=1.987 sldf=1.251 sldb=-1.296 sld=1.987 xeps=0.013 veps=0.012 acc=1.000 sumlogdet=-0.029 beta=6.000 acc_mask=1.000 loss=-278.412 plaqs=0.323 sinQ=0.003 intQ=0.043 dQint=0.010 dQsin=0.001
[2025-04-30 15:47:23][I][ipykernel_24193/30352159:21:ezpz.log] train_step=65 dt=1.557 energy=-3155.115 logprob=-3157.112 logdet=1.997 sldf=1.252 sldb=-1.281 sld=1.997 xeps=0.013 veps=0.013 acc=1.000 sumlogdet=0.006 beta=6.000 acc_mask=1.000 loss=-363.215 plaqs=0.342 sinQ=0.002 intQ=0.025 dQint=0.021 dQsin=0.001
[2025-04-30 15:47:30][I][ipykernel_24193/30352159:21:ezpz.log] train_step=70 dt=1.474 energy=-3309.328 logprob=-3311.369 logdet=2.041 sldf=1.279 sldb=-1.318 sld=2.041 xeps=0.013 veps=0.013 acc=1.000 sumlogdet=-0.011 beta=6.000 acc_mask=1.000 loss=-374.258 plaqs=0.360 sinQ=-0.000 intQ=-0.003 dQint=0.018 dQsin=0.001
[2025-04-30 15:47:37][I][ipykernel_24193/30352159:21:ezpz.log] train_step=75 dt=1.599 energy=-3493.874 logprob=-3496.069 logdet=2.195 sldf=1.355 sldb=-1.349 sld=2.195 xeps=0.013 veps=0.014 acc=1.000 sumlogdet=0.130 beta=6.000 acc_mask=1.000 loss=-380.764 plaqs=0.378 sinQ=0.001 intQ=0.022 dQint=0.011 dQsin=0.001
[2025-04-30 15:47:45][I][ipykernel_24193/30352159:21:ezpz.log] train_step=80 dt=1.682 energy=-3625.884 logprob=-3627.857 logdet=1.973 sldf=1.234 sldb=-1.251 sld=1.973 xeps=0.013 veps=0.015 acc=1.000 sumlogdet=0.019 beta=6.000 acc_mask=1.000 loss=-442.163 plaqs=0.396 sinQ=-0.000 intQ=-0.003 dQint=0.020 dQsin=0.001
[2025-04-30 15:47:53][I][ipykernel_24193/30352159:21:ezpz.log] train_step=85 dt=1.588 energy=-3841.913 logprob=-3843.908 logdet=1.995 sldf=1.230 sldb=-1.213 sld=1.995 xeps=0.012 veps=0.015 acc=1.000 sumlogdet=0.146 beta=6.000 acc_mask=1.000 loss=-556.948 plaqs=0.416 sinQ=0.000 intQ=0.004 dQint=0.008 dQsin=0.001
[2025-04-30 15:48:01][I][ipykernel_24193/30352159:21:ezpz.log] train_step=90 dt=1.512 energy=-4034.155 logprob=-4035.872 logdet=1.718 sldf=1.085 sldb=-1.128 sld=1.718 xeps=0.012 veps=0.016 acc=1.000 sumlogdet=-0.056 beta=6.000 acc_mask=1.000 loss=-518.971 plaqs=0.435 sinQ=-0.000 intQ=-0.001 dQint=0.016 dQsin=0.001
[2025-04-30 15:48:09][I][ipykernel_24193/30352159:21:ezpz.log] train_step=95 dt=1.604 energy=-4136.090 logprob=-4137.796 logdet=1.707 sldf=1.071 sldb=-1.091 sld=1.707 xeps=0.012 veps=0.016 acc=1.000 sumlogdet=-0.034 beta=6.000 acc_mask=1.000 loss=-613.025 plaqs=0.455 sinQ=-0.001 intQ=-0.008 dQint=0.012 dQsin=0.001
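Over the 100 steps the average plaquette rises steadily (≈ -0.004 at step 0 to ≈ 0.455 at step 95), the learned step sizes xeps and veps adapt away from their initial 0.01, and the acceptance rate recovers to 1.0 after dipping near step 40.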
dataset_train = history_train.get_dataset()
_ = history_train.plot_all(
    title='Train',
    num_chains=x.shape[0],
)
Output:

[52 SVG figures: per-metric histories from history_train.plot_all(title='Train')]

Evaluation
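
Evaluate the trained sampler for 50 steps at β = 6.0 (the commented-out x=state.x would instead reuse the earlier state).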

# state = ptExpSU3.trainer.dynamics.random_state(6.0)
xeval, history_eval = evaluate(
    nsteps=50,
    exp=ptExpSU3,
    beta=6.0,
    # x=state.x,
    job_type='eval',
    nlog=1,
    nprint=50,
    grab=True,
)
Output:
[2025-04-30 15:56:12][I][pytorch/experiment:117:l2hmc.experiment.pytorch.experiment] Running 50 steps of eval at beta=6.0000
[2025-04-30 15:56:12][I][pytorch/experiment:121:l2hmc.experiment.pytorch.experiment] STEP: 0
[2025-04-30 15:56:12][I][pytorch/experiment:121:l2hmc.experiment.pytorch.experiment] STEP: 1
[2025-04-30 15:56:13][I][pytorch/experiment:121:l2hmc.experiment.pytorch.experiment] STEP: 2
[2025-04-30 15:56:14][I][pytorch/experiment:121:l2hmc.experiment.pytorch.experiment] STEP: 3
[2025-04-30 15:56:14][I][pytorch/experiment:121:l2hmc.experiment.pytorch.experiment] STEP: 4
[2025-04-30 15:56:15][I][pytorch/experiment:121:l2hmc.experiment.pytorch.experiment] STEP: 5
[2025-04-30 15:56:15][I][pytorch/experiment:121:l2hmc.experiment.pytorch.experiment] STEP: 6
[2025-04-30 15:56:16][I][pytorch/experiment:121:l2hmc.experiment.pytorch.experiment] STEP: 7
[2025-04-30 15:56:16][I][pytorch/experiment:121:l2hmc.experiment.pytorch.experiment] STEP: 8
[2025-04-30 15:56:17][I][pytorch/experiment:121:l2hmc.experiment.pytorch.experiment] STEP: 9
[2025-04-30 15:56:17][I][pytorch/experiment:121:l2hmc.experiment.pytorch.experiment] STEP: 10
[2025-04-30 15:56:18][I][pytorch/experiment:121:l2hmc.experiment.pytorch.experiment] STEP: 11
[2025-04-30 15:56:18][I][pytorch/experiment:121:l2hmc.experiment.pytorch.experiment] STEP: 12
[2025-04-30 15:56:19][I][pytorch/experiment:121:l2hmc.experiment.pytorch.experiment] STEP: 13
[2025-04-30 15:56:19][I][pytorch/experiment:121:l2hmc.experiment.pytorch.experiment] STEP: 14
[2025-04-30 15:56:20][I][pytorch/experiment:121:l2hmc.experiment.pytorch.experiment] STEP: 15
[2025-04-30 15:56:20][I][pytorch/experiment:121:l2hmc.experiment.pytorch.experiment] STEP: 16
[2025-04-30 15:56:21][I][pytorch/experiment:121:l2hmc.experiment.pytorch.experiment] STEP: 17
[2025-04-30 15:56:21][I][pytorch/experiment:121:l2hmc.experiment.pytorch.experiment] STEP: 18
[2025-04-30 15:56:22][I][pytorch/experiment:121:l2hmc.experiment.pytorch.experiment] STEP: 19
[2025-04-30 15:56:22][I][pytorch/experiment:121:l2hmc.experiment.pytorch.experiment] STEP: 20
[2025-04-30 15:56:23][I][pytorch/experiment:121:l2hmc.experiment.pytorch.experiment] STEP: 21
[2025-04-30 15:56:23][I][pytorch/experiment:121:l2hmc.experiment.pytorch.experiment] STEP: 22
[2025-04-30 15:56:24][I][pytorch/experiment:121:l2hmc.experiment.pytorch.experiment] STEP: 23
[2025-04-30 15:56:24][I][pytorch/experiment:121:l2hmc.experiment.pytorch.experiment] STEP: 24
[2025-04-30 15:56:24][I][pytorch/experiment:121:l2hmc.experiment.pytorch.experiment] STEP: 25
[2025-04-30 15:56:25][I][pytorch/experiment:121:l2hmc.experiment.pytorch.experiment] STEP: 26
[2025-04-30 15:56:25][I][pytorch/experiment:121:l2hmc.experiment.pytorch.experiment] STEP: 27
[2025-04-30 15:56:26][I][pytorch/experiment:121:l2hmc.experiment.pytorch.experiment] STEP: 28
[2025-04-30 15:56:26][I][pytorch/experiment:121:l2hmc.experiment.pytorch.experiment] STEP: 29
[2025-04-30 15:56:27][I][pytorch/experiment:121:l2hmc.experiment.pytorch.experiment] STEP: 30
[2025-04-30 15:56:27][I][pytorch/experiment:121:l2hmc.experiment.pytorch.experiment] STEP: 31
[2025-04-30 15:56:28][I][pytorch/experiment:121:l2hmc.experiment.pytorch.experiment] STEP: 32
[2025-04-30 15:56:28][I][pytorch/experiment:121:l2hmc.experiment.pytorch.experiment] STEP: 33
[2025-04-30 15:56:29][I][pytorch/experiment:121:l2hmc.experiment.pytorch.experiment] STEP: 34
[2025-04-30 15:56:29][I][pytorch/experiment:121:l2hmc.experiment.pytorch.experiment] STEP: 35
[2025-04-30 15:56:29][I][pytorch/experiment:121:l2hmc.experiment.pytorch.experiment] STEP: 36
[2025-04-30 15:56:30][I][pytorch/experiment:121:l2hmc.experiment.pytorch.experiment] STEP: 37
[2025-04-30 15:56:30][I][pytorch/experiment:121:l2hmc.experiment.pytorch.experiment] STEP: 38
[2025-04-30 15:56:31][I][pytorch/experiment:121:l2hmc.experiment.pytorch.experiment] STEP: 39
[2025-04-30 15:56:32][I][pytorch/experiment:121:l2hmc.experiment.pytorch.experiment] STEP: 40
[2025-04-30 15:56:32][I][pytorch/experiment:121:l2hmc.experiment.pytorch.experiment] STEP: 41
[2025-04-30 15:56:33][I][pytorch/experiment:121:l2hmc.experiment.pytorch.experiment] STEP: 42
[2025-04-30 15:56:33][I][pytorch/experiment:121:l2hmc.experiment.pytorch.experiment] STEP: 43
[2025-04-30 15:56:34][I][pytorch/experiment:121:l2hmc.experiment.pytorch.experiment] STEP: 44
[2025-04-30 15:56:34][I][pytorch/experiment:121:l2hmc.experiment.pytorch.experiment] STEP: 45
[2025-04-30 15:56:35][I][pytorch/experiment:121:l2hmc.experiment.pytorch.experiment] STEP: 46
[2025-04-30 15:56:35][I][pytorch/experiment:121:l2hmc.experiment.pytorch.experiment] STEP: 47
[2025-04-30 15:56:36][I][pytorch/experiment:121:l2hmc.experiment.pytorch.experiment] STEP: 48
[2025-04-30 15:56:36][I][pytorch/experiment:121:l2hmc.experiment.pytorch.experiment] STEP: 49
dataset_eval = history_eval.get_dataset()
_ = history_eval.plot_all(title='Eval')
Output:

[48 SVG figures: per-metric histories from history_eval.plot_all(title='Eval')]
xeval = ptExpSU3.trainer.dynamics.unflatten(xeval)
logger.info(f"checkSU(x_eval): {g.checkSU(xeval)}")
logger.info(f"checkSU(x_eval): {g.checkSU(g.projectSU(xeval))}")
Output:
[2025-04-30 16:08:39][I][ipykernel_24193/2193937887:2:ezpz.log] checkSU(x_eval): (tensor[8] f64 x∈[1.387e-16, 1.458e-16] μ=1.433e-16 σ=2.311e-18 [1.425e-16, 1.457e-16, 1.387e-16, 1.458e-16, 1.436e-16, 1.422e-16, 1.432e-16, 1.447e-16], tensor[8] f64 x∈[5.173e-16, 7.840e-16] μ=6.551e-16 σ=1.029e-16 [7.840e-16, 6.628e-16, 5.173e-16, 6.288e-16, 6.088e-16, 7.452e-16, 5.299e-16, 7.638e-16])
[2025-04-30 16:08:39][I][ipykernel_24193/2193937887:3:ezpz.log] checkSU(x_eval): (tensor[8] f64 x∈[1.352e-16, 1.451e-16] μ=1.409e-16 σ=3.802e-18 [1.451e-16, 1.352e-16, 1.431e-16, 1.416e-16, 1.366e-16, 1.445e-16, 1.378e-16, 1.430e-16], tensor[8] f64 x∈[4.654e-16, 7.451e-16] μ=5.900e-16 σ=9.667e-17 [7.451e-16, 6.309e-16, 4.961e-16, 6.874e-16, 4.654e-16, 6.100e-16, 5.204e-16, 5.651e-16])
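
To compare the trained sampler against the HMC baseline, take the difference of the plaquette histories from the two runs and plot its square.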
pdiff = dataset_eval.plaqs - dataset_hmc.plaqs

fig, ax = plt.subplots(figsize=(12, 4))
(pdiff ** 2).plot(ax=ax)
ax.set_title(r"$\left|\delta U_{\mu\nu}\right|^{2}$ (Eval - HMC)")
outfile = Path(EVAL_DIR).joinpath('pdiff.svg')
# fig.savefig(outfile.as_posix(), dpi=400, bbox_inches='tight')
plt.show()
Output:

[SVG figure: squared plaquette difference, (Eval - HMC)^2, per chain over the 50 steps]

Citation

BibTeX citation:
@online{foreman2023,
  author = {Foreman, Sam},
  title = {L2hmc-Qcd {Example:} {4D} {SU(3)}},
  date = {2023-12-06},
  url = {https://samforeman.me/posts/ai-for-physics/l2hmc-qcd/4dSU3},
  langid = {en}
}
For attribution, please cite this work as:
Foreman, Sam. 2023. โ€œL2hmc-Qcd Example: 4D SU(3).โ€ December 6, 2023. https://samforeman.me/posts/ai-for-physics/l2hmc-qcd/4dSU3.