Commit 740f7904 authored by cmaffeo2

Updated nonbonded cache code to take up less space

parent 9c4c1637
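The space saving appears to come from consolidating the many per-pair `nbPot-{i}-{j}.npz` cache files into a single archive shipped with the package resources (`nbPot.npz`), keyed by `"smaller:larger"` strings; the new `nbPot()` tries that archive first and only falls back to a per-pair file in `CACHE_DIR`, refitting if neither exists. A minimal sketch of the keyed-archive pattern used in the diff below; the parameter arrays are placeholders, not real fitted coefficients:

```python
import numpy as np

# One consolidated archive instead of one .npz file per base-pair combination.
# The parameter arrays below are placeholders, not real fitted values.
cache = {
    "1.0:1.0": np.array([0.5, 1.2, 3.4]),
    "1.0:2.5": np.array([0.4, 1.1, 3.7]),
}
np.savez("nbPot.npz", **cache)   # dict keys become the archive member names

# Lookup mirrors the new nbPot(): build the "smaller:larger" key, then index.
archive = np.load("nbPot.npz")
params = archive["1.0:2.5"]
```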
import os
+from pathlib import Path
__version__ = "0.1.dev0"
-_RESOURCE_DIR = os.path.join(os.path.dirname(__file__), 'resources')
+_RESOURCE_DIR = Path(__file__).parent / 'resources'
def get_resource_path(relative_path):
-    return os.path.join(_RESOURCE_DIR, relative_path)
+    return _RESOURCE_DIR / relative_path
from .segmentmodel import SegmentModel, SingleStrandedSegment, DoubleStrandedSegment
from .simulate import multiresolution_simulation

@@ -88,8 +88,7 @@ class CanonicalNucleotideFactory(Group):
                                        mass = d['f4'][i])
            p = PointParticle( type_ = type_,
                               position = [xs[i],ys[i],zs[i]],
-                               name = names[i].decode('UTF-8'),
-                               beta = 1)
+                               name = names[i].decode('UTF-8') )
            self.children.append( p )
        atoms = self.children

@@ -5,10 +5,14 @@ from scipy.signal import savgol_filter as savgol
from mrdna.model.nonbonded import NonbondedScheme
from mrdna import get_resource_path
+from mrdna.config import CACHE_DIR
-dxParam = -1
+## TODO: clean this file up a bit, make things protected
+_package_cache_dir = get_resource_path("nbPot_cache")
+_package_cache_dir.mkdir(exist_ok=True)
+_dxParam = -1

def _maxForce(x,u0,maxForce=40):
    maxForce = maxForce * 0.014393265 # convert pN to kcal_mol/AA

@@ -31,9 +35,9 @@ def _maxForce(x,u0,maxForce=40):
d = np.loadtxt(get_resource_path("jj-nar-dna-pmf-100Mg.dat"))
-x0 = d[:,1] + dxParam
+x0 = d[:,1] + _dxParam
y0 = d[:,0] # kcal/mol for 10bp with 10bp, according to JY
-y0 = y0 - np.mean(y0[x0>(38.5 + dxParam)]) # set 0 to tail of potential
+y0 = y0 - np.mean(y0[x0>(38.5 + _dxParam)]) # set 0 to tail of potential
y0 = savgol(y0, int(5.0/(x0[1]-x0[0])), 3) # smooth potential

def parametric_potential(x,*parms):

@@ -97,7 +101,6 @@ def nbPot(x,bps1,bps2):
        try:
            _pot_dict[key] = np.load(cache_file)['arr_0'].tolist()
        except:
-            print("Could not find cache file for {}".format("nbPot-{}-{}.npz".format(*key)))
            _pot_dict[key] = fit_potential(*key)
            np.savez(cache_file, _pot_dict[key])

@@ -110,6 +113,43 @@ def nbPot(x,bps1,bps2):
    assert(np.sum(np.isnan(u2)) == 0)
    return u2

+try:
+    _cached_params = np.load( get_resource_path("nbPot.npz") )
+except:
+    pass
+
+def nbPot(x,bps1,bps2):
+    larger = np.max([bps1,bps2])
+    smaller = np.min([bps1,bps2])
+    smaller_shift, larger_shift = [_round_bp(bp) for bp in (smaller,larger)]
+    key = (smaller_shift,larger_shift)
+
+    if key not in _pot_dict:
+        filename = "nbPot-{}-{}.npz".format(*key)
+        def load_np(filename):
+            return np.load(filename)['arr_0'].tolist()
+        try:
+            _pot_dict[key] = _cached_params['{}:{}'.format(*key)]
+        except:
+            print( "failed" )
+            cache_file = CACHE_DIR / filename
+            try:
+                _pot_dict[key] = load_np(cache_file)
+            except:
+                _pot_dict[key] = fit_potential(*key)
+                np.savez(cache_file, _pot_dict[key])
+
+    u = parametric_potential(x,*_pot_dict[key])
+    u = u * ( (smaller/smaller_shift)*(larger/larger_shift) )
+    u1 = _maxForce(x,u, 100)
+    u2 = savgol(u1, int(5.0/(x0[1]-x0[0])), 3)
+    u2 = u
+    assert(np.sum(np.isnan(u2)) == 0)
+    return u2

def _writeNonbondedParameterFiles(self, prefix):
    x = np.arange(0, 50, 0.1)
    for i,j,t1,t2 in self._particleTypePairIter():

@@ -148,6 +188,39 @@ def _run():
        np.savetxt("test2-{}-{}.dat".format(i,j), np.array((x,y)).T),

+_new_cache_dict = dict()
+
+def _add_parameter_to_cache_dict(key):
+    _new_cache_dict['{}:{}'.format(*key)] = _get_potential_parameters(key)
+
+def _get_potential_parameters(key0):
+    bps1,bps2 = key0
+    larger = np.max([bps1,bps2])
+    smaller = np.min([bps1,bps2])
+    smaller_shift, larger_shift = [_round_bp(bp) for bp in (smaller,larger)]
+    key = (smaller_shift,larger_shift)
+    params = fit_potential(*key)
+    return params
+
+def _write_cache(filename):
+    from multiprocessing import Pool
+    keys = []
+    for i in np.arange(0.4,5.5,0.01):
+        ir = _round_bp(i)
+        for j in np.arange(0.4,5.5,0.01):
+            if j < i: continue
+            keys.append( (ir,_round_bp(j)) )
+    keys = sorted(list(set(keys)))
+    print(len(keys))
+    p = Pool(15)
+    p.map( _add_parameter_to_cache_dict, keys )
+    np.savez(filename, **_new_cache_dict)
+
if __name__ == "__main__":
-    import cProfile
-    cProfile.run("_run()")
+    # import cProfile
+    # cProfile.run("_run()")
+    # _generate_cache()
+    pass
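One caveat about the cache-generation path above: `Pool.map` runs `_add_parameter_to_cache_dict` in separate worker processes, so the module-level `_new_cache_dict` is only filled in the workers' copies and is still empty in the parent when `np.savez` runs. A hypothetical variant (not part of this commit) that collects the fitted parameters from the `map` return values instead, assuming `_round_bp`, `_get_potential_parameters`, and the key ranges behave as in the diff:

```python
from multiprocessing import Pool
import numpy as np

def _write_cache_collected(filename, n_workers=15):
    # Build the unique (smaller, larger) key pairs, as in _write_cache().
    keys = sorted({(_round_bp(i), _round_bp(j))
                   for i in np.arange(0.4, 5.5, 0.01)
                   for j in np.arange(0.4, 5.5, 0.01)
                   if j >= i})

    # Pool.map returns each worker's result to the parent process, so the
    # fitted parameters can be gathered here rather than in per-worker globals.
    with Pool(n_workers) as p:
        params = p.map(_get_potential_parameters, keys)

    cache = {'{}:{}'.format(*key): value for key, value in zip(keys, params)}
    np.savez(filename, **cache)
```

Either way, the resulting archive is what the module-level `np.load(get_resource_path("nbPot.npz"))` expects to find.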