feat: pre_trans for variable #112

Merged (8 commits) on Sep 18, 2023
42 changes: 42 additions & 0 deletions tf_pwa/config_loader/config_loader.py
@@ -264,6 +264,10 @@
self.add_free_var_constraints(amp, constrains.get("free_var", []))
self.add_var_range_constraints(amp, constrains.get("var_range", {}))
self.add_var_equal_constraints(amp, constrains.get("var_equal", []))
self.add_pre_trans_constraints(amp, constrains.get("pre_trans", None))
self.add_from_trans_constraints(
amp, constrains.get("from_trans", None)
)
self.add_gauss_constr_constraints(
amp, constrains.get("gauss_constr", {})
)
@@ -312,6 +316,44 @@
print("same value:", k)
amp.vm.set_same(k)

def add_pre_trans_constraints(self, amp, dic=None):
if dic is None:
return
from tf_pwa.transform import create_trans

for k, v in dic.items():
print("transform:", k, v)
v["x"] = v.get("x", k)
trans = create_trans(v)
amp.vm.pre_trans[k] = trans

def add_from_trans_constraints(self, amp, dic=None):
if dic is None:
return
var_equal = []
pre_trans = {}
new_var = []
for k, v in dic.items():
x = v.pop("x", None)
if x is not None:
if isinstance(x, list) and k != x[0]:
new_var += x
var_equal.append([x[0], k])
elif isinstance(x, str) and x != k:
new_var.append(x)
var_equal.append([x, k])
else:
raise TypeError("x should be str or list")

else:
x = k

v["x"] = x
pre_trans[k] = v
for i in new_var:
if i not in amp.vm.variables:
amp.vm.add_real_var(i)
ConfigLoader.add_var_equal_constraints(self, amp, var_equal)
ConfigLoader.add_pre_trans_constraints(self, amp, pre_trans)

def add_decay_constraints(self, amp, dic=None):
if dic is None:
dic = {}
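For reference, the new `pre_trans` constraint key parsed above takes a mapping from a variable name to a transform specification, with `x` defaulting to the variable itself. A hypothetical config snippet, mirroring the style of the `from_trans` example in `config_toy2.yml` below (the variable name and numbers are illustrative, not taken from the PR), might look like this:

```yaml
constrains:
  pre_trans:
    "R_CD_mass":      # variable to reparameterize (illustrative name)
      model: linear   # registered transform, see tf_pwa/transform.py below
      k: 2.0          # slope of the linear transform
      b: 0.1          # offset; x is omitted, so it defaults to "R_CD_mass"
```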
6 changes: 6 additions & 0 deletions tf_pwa/tests/config_toy2.yml
@@ -34,6 +34,12 @@ particle:
constrains:
particle: null
decay: null
from_trans:
"R_BD_mass":
x: R_CD_mass
model: linear
k: 1.0
b: 0.01

plot:
config:
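With the `from_trans` block added to `config_toy2.yml` above, `R_BD_mass` is no longer fitted as an independent parameter; it is read back as a linear function of `R_CD_mass`. A rough sketch of the equivalent transform call, using the values from that YAML (the printed number is just the expected arithmetic):

```python
from tf_pwa.transform import create_trans

# R_BD_mass = k * R_CD_mass + b = 1.0 * R_CD_mass + 0.01
trans = create_trans({"model": "linear", "x": "R_CD_mass", "k": 1.0, "b": 0.01})
print(trans({"R_CD_mass": 1.5}))  # 1.51
```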
27 changes: 27 additions & 0 deletions tf_pwa/tests/test_variable.py
@@ -142,3 +142,30 @@ def test_rename():
vm.rename_var("d", "c", True)
assert vm.get("cr") == 2
assert vm.get("ci") == 3


def test_transform():
from tf_pwa.config_loader import ConfigLoader
from tf_pwa.transform import BaseTransform, register_trans

@register_trans("__test1")
class Atrans(BaseTransform):
def call(self, x):
return x[0] + x[1]

class Tmp:
pass

tmp = Tmp()
with variable_scope() as vm:
tmp.vm = vm
Variable("a", value=1.0)
Variable("b", value=1.0)
ConfigLoader.add_from_trans_constraints(
None, tmp, {"a": {"x": ["c", "b"], "model": "__test1"}}
)
vm.set("c", 1.0)
assert np.allclose(vm.get("a"), 2.0)
vm.set("a", 1.0)
assert vm.get_all_dic(False) == {"a": 2.0, "b": 1.0, "c": 1.0}
print(vm.get_all_dic(True))
53 changes: 53 additions & 0 deletions tf_pwa/transform.py
@@ -0,0 +1,53 @@
from .config import create_config

set_trans, get_trans, register_trans = create_config()

T = "Tensor"


class BaseTransform:
def __init__(self, x: "list | str", **kwargs):
self.x = x

def __call__(self, dic: dict) -> T:
x = self.read(dic)
return self.call(x)

def read(self, x: dict) -> T:
if isinstance(self.x, (list, tuple)):
return [x[i] for i in self.x]
elif isinstance(self.x, str):
return x[self.x]
else:
raise TypeError("only str of list of str is supported for x")


def call(self, x: T) -> T:
raise NotImplementedError()

def inverse(self, y: T) -> T:
return None


def create_trans(item: dict) -> BaseTransform:
model = item.pop("model", "default")
cls = get_trans(model)
obj = cls(**item)
obj._model_name = model
return obj


@register_trans("default")
@register_trans("linear")
class LinearTrans(BaseTransform):
def __init__(
self, x: "list | str", k: float = 1.0, b: float = 0.0, **kwargs
):
super().__init__(x)
self.k = k
self.b = b

def call(self, x) -> T:
return self.k * x + self.b

def inverse(self, x: T) -> T:
return (x - self.b) / self.k

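Beyond the built-in `linear` model, the registry allows user-defined transforms. The sketch below is a hypothetical example (the `quadratic` name and its `a` parameter are not part of the PR) of subclassing `BaseTransform` and registering it, mirroring the pattern used in `test_variable.py` above:

```python
from tf_pwa.transform import BaseTransform, create_trans, register_trans


@register_trans("quadratic")  # hypothetical model name
class QuadraticTrans(BaseTransform):
    def __init__(self, x, a=1.0, **kwargs):
        super().__init__(x)
        self.a = a

    def call(self, x):
        # forward transform: y = a * x**2 (no inverse defined, so set() leaves the stored value)
        return self.a * x**2


trans = create_trans({"model": "quadratic", "x": "m", "a": 2.0})
print(trans({"m": 3.0}))  # 18.0
```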
27 changes: 19 additions & 8 deletions tf_pwa/variable.py
@@ -99,6 +99,7 @@ def __init__(self, name="", dtype=tf.float64):
self.complex_vars = {} # {name:polar(bool),...}
self.same_list = [] # [[name1,name2],...]
self.mask_vars = {}
self.pre_trans = {}

self.bnd_dic = {} # {name:(a,b),...}

@@ -510,8 +511,8 @@ def same_real(name_list):
self.trainable_vars.remove(name)
else:
# if one is untrainable, the others will all be untrainable
var = self.variables.get(name, None)
if var is not None:
var2 = self.variables.get(name, None)
if var2 is not None:
if name_list[0] in self.trainable_vars:
self.trainable_vars.remove(name_list[0])
for name in name_list:
@@ -534,15 +535,21 @@ def get(self, name, val_in_fit=True):
if name not in self.variables:
raise Exception("{} not found".format(name))
if not val_in_fit or name not in self.bnd_dic:
return self.variables[name].numpy() # tf.Variable
value = self.variables[name]
if name in self.pre_trans:
value = self.pre_trans[name](self.variables)
return value.numpy() # tf.Variable
else:
return self.bnd_dic[name].get_y2x(self.variables[name].numpy())

def read(self, name):
val = self.variables[name]
if name in self.mask_vars:
return tf.stop_gradient(tf.cast(self.mask_vars[name], val.dtype))
return self.variables[name]
val = tf.stop_gradient(tf.cast(self.mask_vars[name], val.dtype))
if name in self.pre_trans:
trans = self.pre_trans[name]
val = trans(self.variables)
return val

def set(self, name, value, val_in_fit=True):
"""
Expand All @@ -554,7 +561,11 @@ def set(self, name, value, val_in_fit=True):
if val_in_fit and name in self.bnd_dic:
value = self.bnd_dic[name].get_x2y(value)
if name in self.variables:
self.variables[name].assign(value)
if name in self.pre_trans:
trans = self.pre_trans[name]
value = trans.inverse(value)
if value is not None:
self.variables[name].assign(value)
else:
warnings.warn("{} not found".format(name))

@@ -632,13 +643,13 @@ def get_all_dic(self, trainable_only=False):
dic = {}
if trainable_only:
for i in self.trainable_vars:
val = self.variables[i].numpy()
val = self.read(i).numpy()
# if i in self.bnd_dic:
# val = self.bnd_dic[i].get_y2x(val)
dic[i] = val
else:
for i in self.variables:
val = self.variables[i].numpy()
val = self.read(i).numpy()
# if i in self.bnd_dic:
# val = self.bnd_dic[i].get_y2x(val)
dic[i] = val
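Taken together, these changes make `get`, `read`, `set`, and `get_all_dic` route through `pre_trans` whenever a transform is registered for a variable. A minimal usage sketch, assuming `Variable` and `variable_scope` come from `tf_pwa.variable` as in the tests above (the variable name and numbers are illustrative):

```python
import numpy as np

from tf_pwa.transform import LinearTrans
from tf_pwa.variable import Variable, variable_scope

with variable_scope() as vm:
    Variable("m", value=2.0)
    # read "m" through a linear reparameterization of its own stored value
    vm.pre_trans["m"] = LinearTrans("m", k=0.5, b=0.1)
    assert np.allclose(vm.get("m"), 1.1)  # 0.5 * 2.0 + 0.1
    vm.set("m", 0.6)                      # stores the inverse, (0.6 - 0.1) / 0.5 = 1.0
    assert np.allclose(vm.get("m"), 0.6)  # round trip through the transform
```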