09: Add argparse interface

2022-11-17 12:07:32 +01:00
parent 46c794c1cb
commit b8235bce5b
2 changed files with 79 additions and 19 deletions

View File

@@ -231,6 +231,33 @@
"plt.show()"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "e506d9a5",
"metadata": {},
"outputs": [],
"source": [
"import h5py\n",
"import numpy as np\n",
"from matplotlib import pyplot as plt\n",
"\n",
"# Load data\n",
"output_filename = \"preliminary_simulation.h5\"\n",
"with h5py.File(output_filename,'r') as f:\n",
" handler = f[\"mean-magn\"]\n",
" mean_magn = np.array(handler)\n",
" kappas = handler.attrs[\"kappas\"]\n",
"\n",
"# Plotterdeplotterdeplot\n",
"plt.figure()\n",
"plt.errorbar(kappas,[m[0] for m in mean_magn],yerr=[m[1] for m in mean_magn],fmt='-o')\n",
"plt.xlabel(r\"$\\kappa$\")\n",
"plt.ylabel(r\"$|m|$\")\n",
"plt.title(r\"Absolute field average on $3^4$ lattice, $\\lambda = 1.5$\")\n",
"plt.show()"
]
},
{
"cell_type": "markdown",
"id": "d4640925",

View File

@@ -1,3 +1,5 @@
__doc__ = "Simulates a lattice scalar field using the Metropolis-Hastings algorithm."
import numpy as np
import argparse
import time
@@ -49,33 +51,64 @@ def batch_estimate(data,observable,k):
    return np.mean(values), np.std(values)/np.sqrt(k-1)

def main():
    lamb = 1.5
    kappas = np.linspace(0.08,0.18,11)
    width = 3
    # use the argparse package to parse command line arguments
    parser = argparse.ArgumentParser(description=__doc__)
    # TODO: Describe what lambda, delta, and kappa really mean.
    parser.add_argument('-l', type=float, default=1.5, help='lambda')
    parser.add_argument('-k', type=float, default=0.08, help='kappa')
    parser.add_argument('-w', type=int, default=3, help='Width w of the square lattice.')
    # delta = 1.5 chosen to have ~ 50% acceptance
    parser.add_argument('-d', type=float, default=1.5, help='delta')
    parser.add_argument('-n', type=int, help='Number N of measurements (indefinite by default)')
    parser.add_argument('-e', type=int, default=100, help='Number E of equilibration sweeps')
    parser.add_argument('-m', type=int, default=2, help='Number M of sweeps per measurement')
    parser.add_argument('-o', type=int, default=30, help='Time in seconds between file outputs')
    parser.add_argument('-f', help='Output filename')
    args = parser.parse_args()
    # perform sanity checks on the arguments
    if args.w is None or args.w < 1:
        parser.error("Please specify a positive lattice size!")
    if args.k is None or args.k <= 0.0:
        parser.error("Please specify a positive kappa!")
    if args.e < 10:
        parser.error("Need at least 10 equilibration sweeps to determine the average cluster size")
    if args.d < 1:
        parser.error("Delta should be >= 1.")
    # fix parameters
    lamb = args.l
    kappa = args.k
    width = args.w
    num_sites = width**4
    delta = 1.5 # chosen to have ~ 50% acceptance
    equil_sweeps = 10
    measure_sweeps = 2
    measurements = 20
    delta = args.d
    equil_sweeps = args.e
    measure_sweeps = args.m
    measurements = args.n
    if args.f is None:
        # construct a filename from the parameters plus a timestamp (to avoid overwriting)
        output_filename = "data_l{}_k{}_w{}_d{}_{}.hdf5".format(lamb,kappa,width,delta,time.strftime("%Y%m%d%H%M%S"))
    else:
        output_filename = args.f
    # Measure
    # TODO: Does mean_magn need to be a list?
    mean_magn = []
    for kappa in kappas:
        phi_state = np.zeros((width,width,width,width))
        run_scalar_MH(phi_state,lamb,kappa,delta,equil_sweeps * num_sites)
        magnetizations = np.empty(measurements)
        for i in range(measurements):
            run_scalar_MH(phi_state,lamb,kappa,delta,measure_sweeps * num_sites)
            magnetizations[i] = np.mean(phi_state)
        mean, err = batch_estimate(np.abs(magnetizations),lambda x:np.mean(x),10)
        mean_magn.append([mean,err])
    phi_state = np.zeros((width,width,width,width))
    run_scalar_MH(phi_state,lamb,kappa,delta,equil_sweeps * num_sites)
    magnetizations = np.empty(measurements)
    for i in range(measurements):
        run_scalar_MH(phi_state,lamb,kappa,delta,measure_sweeps * num_sites)
        magnetizations[i] = np.mean(phi_state)
    mean, err = batch_estimate(np.abs(magnetizations),lambda x:np.mean(x),10)
    mean_magn.append([mean,err])
    output_filename = 'preliminary_simulation.h5'
    with h5py.File(output_filename,'a') as f:
        if not "mean-magn" in f:
            dataset = f.create_dataset("mean-magn", chunks=True, data=mean_magn)
            # store some information as metadata for the data set
            dataset.attrs["lamb"] = lamb
            dataset.attrs["kappas"] = kappas
            dataset.attrs["kappa"] = kappa
            dataset.attrs["width"] = width
            dataset.attrs["num_sites"] = num_sites
            dataset.attrs["delta"] = delta