Skip to content

Commit

Permalink
add repeatable output with per-result json files
Browse files Browse the repository at this point in the history
  • Loading branch information
willsheffler committed Oct 6, 2020
1 parent 38558d8 commit 1702f3b
Show file tree
Hide file tree
Showing 2 changed files with 75 additions and 40 deletions.
37 changes: 30 additions & 7 deletions worms/criteria/stack.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,13 +31,36 @@ def jit_lossfunc(self):

@jit
def func(pos, idx, verts):
    """Stack-criterion loss for the to_seg frame.

    The target frame should sit on the global z-axis (cartesian tolerance
    ctol, squared as ctol_sq) with its own z-axis aligned to global z
    (angular tolerance rtol). Returns a scaled error, or 9e9 when either
    the cartesian or the angular error is hopelessly far out of tolerance.
    """
    cen = pos[to_seg, :3, 3]    # translation of the target segment frame
    axis = pos[to_seg, :3, 2]   # z-axis of the target segment frame

    # flip axis toward cen so the error measure below is sign-independent
    if np.sum(axis * cen) < 0:
        axis = -axis

    # cheap early rejection: off-axis distance far beyond tolerance
    dist_sq = cen[0]**2 + cen[1]**2
    if dist_sq > ctol_sq * 4:
        return 9e9

    # cheap early rejection: axis tilt far beyond tolerance
    # NOTE(review): np.abs(axis[2]) may exceed 1.0 by float rounding,
    # making arccos return nan; consider clamping — TODO confirm
    axis_angle = np.arccos(np.abs(axis[2]))
    if axis_angle > rtol * 4:
        return 9e9

    # error: distance between cen and the point at the same radius along
    # the (flipped) frame axis, scaled by the cartesian tolerance.
    # The old correction_axis / cart_bad_err computation was unused (its
    # only consumer was commented out) and divided by a near-zero norm
    # precisely when axis ~ +z, so it has been removed.
    cen_len = np.linalg.norm(cen)
    delta = axis * cen_len - cen
    return np.sqrt(np.sum(delta**2) / ctol_sq)

return func

Expand Down
78 changes: 45 additions & 33 deletions worms/output.py
Original file line number Diff line number Diff line change
Expand Up @@ -81,25 +81,6 @@ def filter_and_output_results(
print('output skipping', iresult)
continue

if False:
# make json files with bblocks for single result
tmp, seenit = list(), set()
for j in range(len(ssdag.verts)):
v = ssdag.verts[j]
ibb = v.ibblock[result.idx[iresult, j]]
bb = ssdag.bbs[j][ibb]
fname = str(bytes(bb.file), 'utf-8')
if fname not in seenit:
for e in db[0]._alldb:
if e['file'] == fname:
tmp.append(e)
seenit.add(fname)
import json
jsonfname = 'tmp_%i.json' % iresult
print('output bblocks to', jsonfname)
with open(jsonfname, 'w') as out:
json.dump(tmp, out)

# print(getmem(), 'MEM ================ top of loop ===============')

if iresult % 100 == 0:
Expand Down Expand Up @@ -158,20 +139,8 @@ def filter_and_output_results(

# print(getmem(), 'MEM dbfilters before')
try:
(
jstr,
jstr1,
filt,
grade,
sp,
mc,
mcnh,
mhc,
nc,
ncnh,
nhc,
) = run_db_filters(db, criteria, ssdag, iresult, result.idx[iresult], pose, prov,
**kw)
(jstr, jstr1, filt, grade, sp, mc, mcnh, mhc, nc, ncnh, nhc) = run_db_filters(
db, criteria, ssdag, iresult, result.idx[iresult], pose, prov, **kw)
except Exception as e:
print("error in db_filters:")
print(traceback.format_exc())
Expand Down Expand Up @@ -329,6 +298,49 @@ def filter_and_output_results(
out.write("Closure error: " + str(rms) + "\n")
#

if True:
    # Write a per-result json file describing the bblocks this result
    # uses, with each used connection's residue list narrowed to the
    # residues actually used by the result.
    tmp, seenit = list(), set()
    detail = dict(bblock=list(), ires=list(), isite=list(), ichain=list())
    for j in range(len(ssdag.verts)):
        v = ssdag.verts[j]
        ibb = v.ibblock[result.idx[iresult, j]]
        bb = ssdag.bbs[j][ibb]
        fname = str(bytes(bb.file), 'utf-8')
        detail['bblock'].append(fname)
        detail['ires'].append(v.ires[result.idx[iresult, j]].tolist())
        detail['isite'].append(v.isite[result.idx[iresult, j]].tolist())
        detail['ichain'].append(v.ichain[result.idx[iresult, j]].tolist())
        if fname not in seenit:
            # collect the db entry for each distinct bblock file once
            for e in db[0]._alldb:
                if e['file'] == fname:
                    tmp.append(e)
            seenit.add(fname)

    for e in tmp:
        # gather the residue/site indices belonging to this bblock file
        ires, isite = list(), list()
        for i in range(len(detail['ires'])):
            if e['file'] == detail['bblock'][i]:
                ires.append(detail['ires'][i])
                isite.append(detail['isite'][i])
        # BUG FIX: was 'is not -1' — identity comparison with an int
        # literal (works only by CPython small-int caching); use != -1.
        # NOTE(review): this mutates entries shared with db[0]._alldb in
        # place, so later results see the narrowed lists — confirm intended.
        for ic in isite:
            if ic[0] != -1: e['connections'][ic[0]]['residues'].clear()
            if ic[1] != -1: e['connections'][ic[1]]['residues'].clear()
        for ir, ic in zip(ires, isite):
            if ic[0] != -1: e['connections'][ic[0]]['residues'].append(ir[0])
            if ic[1] != -1: e['connections'][ic[1]]['residues'].append(ir[1])

    # (removed: 'tmp = tmp.copy()' — a no-op shallow copy of the list)
    print(detail)

    import json
    jsonfname = 'tmp_%i.json' % iresult
    print('output bblocks to', jsonfname)
    with open(jsonfname, 'w') as out:
        json.dump(tmp, out, indent=4)
        out.write('\n')

print(getmem(), 'MEM dump pdb after')

if info_file is not None:
Expand Down

0 comments on commit 1702f3b

Please sign in to comment.