Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account-related emails.

Already on GitHub? Sign in to your account

rewrite refresh_vars for real parameters #40

Open
wants to merge 7 commits into
base: dev
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
31 changes: 24 additions & 7 deletions fit.py
Original file line number Diff line number Diff line change
Expand Up @@ -40,7 +40,7 @@ def load_config(config_file="config.yml", total_same=False):
return MultiConfig(config_files, total_same=total_same)


def fit(config, init_params="", method="BFGS", loop=1, maxiter=500):
def fit(config, vm, init_params="", method="BFGS", loop=1, maxiter=500):
"""
simple fit script
"""
Expand All @@ -59,6 +59,8 @@ def fit(config, init_params="", method="BFGS", loop=1, maxiter=500):
print("\nusing RANDOM parameters", flush=True)
# try to fit
try:
# vm.rp2xy_all()
vm.std_polar_all()
fit_result = config.fit(
batch=65000, method=method, maxiter=maxiter
)
Expand Down Expand Up @@ -139,6 +141,7 @@ def write_some_results_combine(config, fit_result, save_root=False):

for it, config_i in enumerate(config.configs):
print("########## fit fractions {}:".format(it))
print(f"nll{it}", config_i.get_fcn()({}).numpy())
mcdata = config_i.get_phsp_noeff()
fit_frac, err_frac = fit_fractions(
config_i.get_amplitude(),
Expand Down Expand Up @@ -186,16 +189,24 @@ def main():
parser.add_argument(
"--no-GPU", action="store_false", default=True, dest="has_gpu"
)
parser.add_argument("-c", "--config", default="config.yml", dest="config")
parser.add_argument(
"-c", "--config", default="config.yml", dest="config"
)
parser.add_argument(
"-i", "--init_params", default="init_params.json", dest="init"
)
parser.add_argument("-m", "--method", default="BFGS", dest="method")
parser.add_argument("-l", "--loop", type=int, default=1, dest="loop")
parser.add_argument(
"-m", "--method", default="BFGS", dest="method"
)
parser.add_argument(
"-l", "--loop", type=int, default=1, dest="loop"
)
parser.add_argument(
"-x", "--maxiter", type=int, default=2000, dest="maxiter"
)
parser.add_argument("-r", "--save_root", default=False, dest="save_root")
parser.add_argument(
"-r", "--save_root", default=False, dest="save_root"
)
parser.add_argument(
"--total-same", action="store_true", default=False, dest="total_same"
)
Expand All @@ -206,11 +217,17 @@ def main():
devices = "/device:CPU:0"
with tf.device(devices):
config = load_config(results.config, results.total_same)
try:
vm = config.get_amplitudes()[0].vm
except:
vm = config.get_amplitude().vm
fit_result = fit(
config, results.init, results.method, results.loop, results.maxiter
config, vm, results.init, results.method, results.loop, results.maxiter
)
if isinstance(config, ConfigLoader):
write_some_results(config, fit_result, save_root=results.save_root)
write_some_results(
config, fit_result, save_root=results.save_root
)
else:
write_some_results_combine(
config, fit_result, save_root=results.save_root
Expand Down
2 changes: 1 addition & 1 deletion state_cache.sh
Original file line number Diff line number Diff line change
Expand Up @@ -35,7 +35,7 @@ do
done
echo "using ${new_json_file} as params file"

newer_file=`find -cnewer ${new_json_file} | grep -v trash | grep -E ".*\.(C|json|root|log|png|txt|pdf)"`
newer_file=`find -cnewer ${new_json_file} | grep -v trash | grep -E ".*\.(C|json|root|log|png|txt|pdf|csv)"`
for i in ${newer_file};
do
cache_file ${i}
Expand Down
22 changes: 15 additions & 7 deletions tf_pwa/config_loader/plot.py
Original file line number Diff line number Diff line change
Expand Up @@ -473,13 +473,21 @@ def _plot_partial_wave(
linewidth=1,
)
else:
le3 = hist_i.draw(
ax,
color=curve_style[0],
linestyle=curve_style[1:],
label=label,
linewidth=1,
)
try:
le3 = hist_i.draw(
ax,
color=curve_style[0],
linestyle=curve_style[1:],
label=label,
linewidth=1,
)
except:
le3 = hist_i.draw(
ax,
color=curve_style,
label=label,
linewidth=1,
)
legends.append(le3[0])
legends_label.append(label)
if yscale == "log":
Expand Down
70 changes: 36 additions & 34 deletions tf_pwa/variable.py
Original file line number Diff line number Diff line change
Expand Up @@ -350,42 +350,42 @@ def refresh_vars(self, init_val=None, bound_dic=None):
dtype=self.dtype,
)
)
# all_vars = set(self.trainable_vars) # real vars
# real_vars = all_vars - set(cplx_vars)
for name in set(init_val) & set(self.trainable_vars):
if hasattr(init_val[name], "__len__"):
mu = init_val[name][0]
sigma = init_val[name][1]
if name not in bound_dic:
val = tf.random.normal(
shape=[], mean=mu, stddev=sigma, dtype=self.dtype
)
else:
range_ = bound_dic[name]
while True:

# for real parameters
for name in self.trainable_vars:
if name in init_val: # given initial values, in format either {name: [mu, sigma]} or {name: value}
if hasattr(init_val[name], "__len__"):
mu = init_val[name][0]
sigma = init_val[name][1]
if name not in bound_dic:
val = tf.random.normal(
shape=[], mean=mu, stddev=sigma, dtype=self.dtype
)
if val < range_[1] and val > range_[0]:
break
self.variables[name].assign(val)
else:
if init_val[name] is not None:
self.variables[name].assign(init_val[name])

for name in set(bound_dic) - set(init_val):
_min, _max = bound_dic[name]
if _min is not None:
if _max is not None:
val = tf.random.uniform(
shape=[], minval=_min, maxval=_max, dtype=self.dtype
)
else:
range_ = bound_dic[name]
while True:
val = tf.random.normal(
shape=[], mean=mu, stddev=sigma, dtype=self.dtype
)
if val < range_[1] and val > range_[0]:
break
self.variables[name].assign(val)
else:
val = _min + np.random.chisquare(df=1)
else:
if _max is not None:
val = _max - np.random.chisquare(df=1)
self.variables[name].assign(val)
if init_val[name] is not None:
self.variables[name].assign(init_val[name])
elif name in bound_dic: # if not given init_val but the parameter has boundary
_min, _max = bound_dic[name]
if _min is not None:
if _max is not None:
val = tf.random.uniform(
shape=[], minval=_min, maxval=_max, dtype=self.dtype
)
else:
val = _min + np.random.chisquare(df=1)
else:
if _max is not None:
val = _max - np.random.chisquare(df=1)
self.variables[name].assign(val)

def set_fix(self, name, value=None, unfix=False):
"""
Expand Down Expand Up @@ -678,17 +678,19 @@ def std_polar(self, name):
phase part is between :math:`-\\pi` to :math:`\\pi`.
:param name: String
"""
self.xy2rp(name)
if self.complex_vars[name] is not True: # it is a xy complex
return
r = self.variables[name + "r"]
p = self.variables[name + "i"]
if r < 0:
r.assign(tf.abs(r))
if type(self.complex_vars[name]) == list:
print("$$$!@@!deprecated in variable.VarsManager.std_polar") # could be deprecated
for name_r in self.complex_vars[name]:
self.variables[name_r[:-1] + "i"].assign_add(np.pi)
else:
p.assign_add(np.pi)
self._std_polar_angle(p)
p.assign(self._std_polar_angle(p))

def std_polar_all(self): # std polar expression: r>0, -pi<p<pi
"""
Expand Down