# -*- coding: utf-8 -*-
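"""beadModelTwist.py

Coarse-grained bead model with explicit twist for cadnano designs.  The model
places roughly one bead per few base pairs of dsDNA (or nucleotides of ssDNA),
attaches orientation beads that track helical twist, writes tabulated
bond/angle/dihedral potentials, and runs an ARBD Brownian dynamics simulation.
beadModelTwist.simulate() is the main entry point.
"""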
from datetime import datetime
from cadnano.cnenum import PointType
from math import pi,sqrt,exp,floor
import numpy as np
from scipy.special import erf
import scipy.optimize as opt
import os, sys, subprocess
import nbPot
from coords import minimizeRmsd, quaternionToMatrix3, rotationAboutAxis
class HarmonicPotential:
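    """Tabulated harmonic potential U(r) = 0.5*k*(r-r0)**2 written over rRange.

    Subclasses (NonBonded, Bond, Angle, Dihedral) set the potential type and
    the scale factor applied to k when naming the output file.  maxForce caps
    the gradient of the tabulated potential; maxPotential exponentially
    attenuates the force where the energy would exceed that value."""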
def __init__(self, k, r0, rRange=(0,50), resolution=0.1, maxForce=None, maxPotential=None):
self.k = k
self.r0 = r0
self.maxForce = maxForce
self.maxPotential = maxPotential
self.rRange = rRange
self.resolution = resolution
self.periodic = False
self.type = "None"
self._kscale = None
def filename(self, prefix='potentials/'):
# raise NotImplementedError("Not implemented")
return "%s%s-%.3f-%.3f.dat" % (prefix, self.type,
self.k*self._kscale, self.r0)
def write_file(self, prefix='potentials/'):
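        """Tabulate the potential over rRange at the given resolution and save
        it as a two-column (r, U) text file readable through ARBD's
        tabulated*File keywords."""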
r = np.arange( self.rRange[0],
self.rRange[1]+self.resolution,
self.resolution )
dr = r-self.r0
if self.periodic == True:
rSpan = self.rRange[1]-self.rRange[0]
assert(rSpan > 0)
dr = np.mod( dr+0.5*rSpan, rSpan) - 0.5*rSpan
u = 0.5*self.k*dr**2
if self.maxForce is not None:
assert(self.maxForce > 0)
f = np.diff(u)/np.diff(r)
f[f > self.maxForce] = self.maxForce
f[f < -self.maxForce] = -self.maxForce
u[0] = 0
u[1:] = np.cumsum(f*np.diff(r))
u = u - np.min(u)
if self.maxPotential is not None:
f = np.diff(u)/np.diff(r)
ids = np.where( 0.5*(u[1:]+u[:-1]) > self.maxPotential )[0]
w = np.sqrt(2*self.maxPotential/self.k)
drAvg = 0.5*(np.abs(dr[ids]) + np.abs(dr[ids+1]))
f[ids] = f[ids] * np.exp(-(drAvg-w)/(w))
u[0] = 0
u[1:] = np.cumsum(f*np.diff(r))
u = u - np.min(u)
np.savetxt( self.filename(prefix), np.array([r, u]).T, fmt="%f" )
def __hash__(self):
assert(self.type != "None")
return hash((self.type, self.k, self.r0, self.rRange, self.resolution, self.maxForce, self.maxPotential, self.periodic))
def __eq__(self, other):
for a in ("type", "k", "r0", "rRange", "resolution", "maxForce", "maxPotential", "periodic"):
if self.__dict__[a] != other.__dict__[a]:
return False
return True
class NonBonded(HarmonicPotential):
def __init__(self, k, r0, rRange=(0,50), resolution=0.1, maxForce=None, maxPotential=None):
super().__init__(k,r0,rRange,resolution,maxForce,maxPotential)
self.type = "nonbonded"
self._kscale = 1.0
class Bond(HarmonicPotential):
def __init__(self, k, r0, rRange=(0,800), resolution=0.1, maxForce=5, maxPotential=None):
super().__init__(k,r0,rRange,resolution,maxForce,maxPotential)
self.type = "bond"
self._kscale = 1.0
class Angle(HarmonicPotential):
def __init__(self, k, r0, rRange=(0,180), resolution=0.5, maxForce=None, maxPotential=None):
super().__init__(k,r0,rRange,resolution,maxForce,maxPotential)
self.type = "angle"
self._kscale = (180.0/pi)**2
class Dihedral(HarmonicPotential):
def __init__(self, k, r0, rRange=(-180,180), resolution=1, maxForce=None, maxPotential=None):
super().__init__(k,r0,rRange,resolution,maxForce,maxPotential)
self.periodic = True
self.type = "dihedral"
self._kscale = (180.0/pi)**2
class Node():
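    """A single bead, representing several base pairs of dsDNA, several
    nucleotides of ssDNA, or an orientation marker ("O") attached to a dsDNA
    bead.  Nodes are linked to their intrahelical neighbors (nodeAbove and
    nodeBelow, with the separation stored in base pairs) and to crossover
    partners (xovers, ssXovers)."""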
def __init__(self, helix, pos, type="dsDNA"):
self.helix = helix
self.position = np.array(pos)
self.initialPosition = np.array(pos)
self.type = type
self.nodeAbove = None
self.nodeBelow = None
self.xovers = []
self.ssXovers = []
self.orientationNode = None
self.parentNode = None
self.idx = helix.model.numParticles
helix.model.numParticles += 1
def addNodeAbove(self, node, separation):
assert(self.nodeAbove is None)
self.nodeAbove = node
self.nodeAboveSep = separation # bp
def addNodeBelow(self, node, separation):
assert(self.nodeBelow is None)
self.nodeBelow = node
self.nodeBelowSep = separation # bp
def addXover(self, node, fwds, double=False):
## TODO: what is meant by polarity?
self.xovers.append( (node,fwds,double) )
def addSsXover(self, node, fwds):
self.ssXovers.append( (node,fwds) )
def getNodesAbove(self,numNodes,inclusive=False):
assert( type(numNodes) is int and numNodes > 0 )
nodeList,sepList = [[],[]]
n = self
if inclusive:
nodeList.append(n)
for i in range(numNodes):
if n.nodeAbove is None: break
n = n.nodeAbove
nodeList.append(n)
sepList.append(n.nodeBelowSep)
return nodeList,sepList
def getNodesBelow(self,numNodes,inclusive=False):
assert( type(numNodes) is int and numNodes > 0 )
nodeList,sepList = [[],[]]
n = self
if inclusive:
nodeList.append(n)
for i in range(numNodes):
if n.nodeBelow is None: break
n = n.nodeBelow
nodeList.append(n)
            sepList.append(n.nodeAboveSep)
return nodeList,sepList
def addOrientationNode(self, node):
assert(self.nodeBelow is None)
self.orientationNode = node
node.parentNode = self
class helix():
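    """One cadnano virtual helix: maps z-indices to 3D positions (Angstroms)
    and twist angles (degrees), and holds the Nodes (and orientation Nodes)
    created along the helix."""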
def __init__(self, model, part, hid):
self.model = model
self.props = part.getModelProperties().copy() # TODO: maybe move this out of here
self.nodes = dict()
self.orientationNodes = dict()
self.hid = hid
if self.props.get('point_type') == PointType.ARBITRARY:
# TODO add code to encode Parts with ARBITRARY point configurations
raise NotImplementedError("Not implemented")
else:
vh_props, origins = part.helixPropertiesAndOrigins()
for x in vh_props:
self.props[x] = vh_props[x][hid]
self.origin = origins[hid]
x,y = self.origin
self.zIdxToPos = lambda idx: (x*10,y*10,-3.4*idx)
            ## get twist
keys = ['bases_per_repeat',
'turns_per_repeat',
'eulerZ','z']
bpr,tpr,eulerZ,z = [vh_props[k][hid] for k in keys]
twist_per_base = tpr*360./bpr
self.zIdxToAngle = lambda idx: idx*twist_per_base + eulerZ + 160
def addNode(self, zIdx, strandOccupancies):
## Determine what kind of node we are making
i = int(round(zIdx))
if i in strandOccupancies[0] and i in strandOccupancies[1]:
type = "dsDNA"
elif i in strandOccupancies[0] or i in strandOccupancies[1]:
# type = "dsDNA"
type = "ssDNA"
else:
raise Exception( "Attempt to add a node at %d where there is no DNA!\n Strand at indeces: %s" % (i,strandOccupancies) )
## Add the node
n = Node(self, self.zIdxToPos(zIdx), type)
if zIdx in self.nodes:
raise Exception("Attempted to add a node in the same location (%d:%.1f) twice!" % (self.hid,zIdx))
self.nodes[zIdx] = n
if type == "dsDNA":
angle = self.zIdxToAngle(zIdx)
pos = np.array([2.0,0,0]).dot( rotationAboutAxis([0,0,1], angle) )
o = Node(self, np.array(self.zIdxToPos(zIdx)) + pos, "O")
self.orientationNodes[zIdx] = o
n.addOrientationNode(o)
## Update ordered list of nodes
        if self.model.particles is not None:
            self.model.buildOrderedParticlesList()
return n
def getOrigin(self):
return self.origin
def __iter__(self):
for x in sorted(self.nodes.items(), key=lambda x: x[0]):
yield x
class beadModelTwist():
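    """Coarse-grained bead model of a cadnano part with explicit twist.

    Construction walks the part's virtual helices, placing roughly one bead
    per maxBpsPerDNode base pairs of dsDNA (or maxNtsPerSNode nucleotides of
    ssDNA) plus orientation beads.  Call simulate() to write the ARBD input
    files and run the simulation; backmap() maps coordinates from a coarser
    model onto this one."""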
def __init__(self, part, twistPersistenceLength=75.0, maxBpsPerDNode=4, maxNtsPerSNode=2):
self.numParticles = 0
self.helices = dict()
self.particles = None
self.particleTypeCounts = None
# self._nbParams = set()
self.bonds = set()
self.angles = set()
self.dihedrals = set()
self._nbParamFiles = []
# self._bondParamFiles = set()
# self._angleParamFiles = set()
# self._dihedralParamFiles = set()
self.twistPersistenceLength = twistPersistenceLength
self._buildModel(part, maxBpsPerDNode, maxNtsPerSNode)
## Post process the model
self.buildOrderedParticlesList()
self._setTypes()
self._countParticleTypes()
self.buildOrderedParticlesList()
def __iter__(self):
for x in sorted(self.helices.items(), key=lambda x: x[0]):
yield x
def buildOrderedParticlesList(self):
## Create ordered list
particles = [(n,hid,zid) for hid,hlx in self for zid,n in hlx]
particles.extend( [(o,hid,zid) for hid,hlx in self for zid,o in hlx.orientationNodes.items()] )
self.particles = sorted(particles, key=lambda x: (x[0].type, x[0].idx))
## Update node indices
for p,i in zip(self.particles,range(self.numParticles)):
p[0].idx = i
self.initialCoords = np.array([p[0].initialPosition for p in self.particles])
self._nodeHids = np.array([p[1] for p in self.particles])
def _setTypes(self):
for p,hid,zid in self.particles:
if p.type == "O":
p.bps = 0
continue
bps = []
if p.nodeAbove is not None: bps.append(p.nodeAboveSep)
if p.nodeBelow is not None: bps.append(p.nodeBelowSep)
if bps == []: bps = [3]
p.bps = 10*np.mean(bps)
if p.type == "ssDNA":
p.bps *= 0.5
p.bps = int(round(p.bps))
p.type = "%s%d" % (p.type[0], p.bps)
def _countParticleTypes(self):
particleTypeCounts = dict()
for p in self.particles:
t = p[0].type
if t in particleTypeCounts:
particleTypeCounts[t] += 1
else:
particleTypeCounts[t] = 1
self.particleTypeCounts = particleTypeCounts
def addHelix(self, part, hid):
h = helix(self,part,hid)
self.helices[hid] = h
return h
def _helixStrandsToEnds(self, helixStrands):
"""Utility method to convert cadnano strand lists into list of
indices of terminal points"""
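        ## e.g. ( [(2,10),(15,20)], [(2,20)] )  ->  [ [2,10,15,20], [2,20] ]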
endLists = [[],[]]
for endList, strandList in zip(endLists,helixStrands):
lastStrand = None
for s in strandList:
if lastStrand is None:
## first strand
endList.append(s[0])
elif lastStrand[1] != s[0]-1:
assert( s[0] > lastStrand[1] )
endList.extend( [lastStrand[1], s[0]] )
lastStrand = s
if lastStrand is not None:
endList.append(lastStrand[1])
return endLists
def simulate(self, outputPrefix, outputDirectory='output', numSteps=100000000, timestep=100e-6, gpu=0, arbd=None):
self._buildBonds(outputPrefix)
self._buildAngles(outputPrefix)
self._buildDihedrals(outputPrefix)
## Check that potentials don't have hash collisions
for potSet in (self.bonds, self.angles, self.dihedrals):
pots = [p[-1] for p in potSet]
d = dict()
for p in pots:
                f = p.filename()
if f not in d: d[f]=[]
d[f].append(p)
for f,pots in d.items():
assert( len(set(pots)) == 1 )
assert(type(gpu) is int)
assert(type(numSteps) is int)
if outputDirectory == '': outputDirectory='.'
if arbd is None:
for path in os.environ["PATH"].split(os.pathsep):
path = path.strip('"')
fname = os.path.join(path, "arbd")
if os.path.isfile(fname) and os.access(fname, os.X_OK):
arbd = fname
break
        if arbd is None or not os.path.isfile(arbd):
            raise Exception("ARBD was not found")
        if not os.access(arbd, os.X_OK):
            raise Exception("ARBD is not executable")
if not os.path.exists(outputDirectory):
os.makedirs(outputDirectory)
elif not os.path.isdir(outputDirectory):
raise Exception("outputDirectory '%s' is not a directory!" % outputDirectory)
self.writePdb( outputPrefix + ".pdb" )
self.writePsf( outputPrefix + ".psf" )
self.writeArbdFiles( outputPrefix, numSteps=numSteps, timestep=timestep )
env = os.environ.copy()
env["CUDA_DEVICE_ORDER"] = "PCI_BUS_ID"
## http://stackoverflow.com/questions/18421757/live-output-from-subprocess-command
# cmd = "%s -g %d %s.bd %s/%s" % (arbd, gpu, outputPrefix, outputDirectory, outputPrefix)
cmd = (arbd, '-g', "%d" % gpu, "%s.bd" % outputPrefix, "%s/%s" % (outputDirectory, outputPrefix))
cmd = tuple(str(x) for x in cmd)
print("Running ARBD with: %s" % " ".join(cmd))
process = subprocess.Popen(cmd, env=env, stdout=subprocess.PIPE, universal_newlines=True)
for line in process.stdout:
# for line in iter(process.stdout.readline, b''):
sys.stdout.write(line)
sys.stdout.flush()
# sys.stdout.write(line.decode(sys.stdout.encoding))
# -------------------------- #
# Methods for building model #
# -------------------------- #
def _buildModel(self, part, maxBpsPerDNode, maxNtsPerSNode):
# maxVhelixId = part.getIdNumMax()
props = part.getModelProperties().copy()
# print(props)
if props.get('point_type') == PointType.ARBITRARY:
# TODO add code to encode Parts with ARBITRARY point configurations
raise NotImplementedError("Not implemented")
else:
vh_props, origins = part.helixPropertiesAndOrigins()
# print(' VIRTUAL HELICES:', vh_props)
# # print(' ORIGINS:', origins)
# group_props['virtual_helices'] = vh_props
# group_props['origins'] = origins
## TODO: compartmentalize following
## Loop over virtual helices and build lists of strands
vh_list = []
strand_list = []
xover_list = []
numHID = part.getIdNumMax() + 1
for id_num in range(numHID):
offset_and_size = part.getOffsetAndSize(id_num)
if offset_and_size is None:
# add a placeholder
vh_list.append((id_num, 0))
strand_list.append(None)
# prop_list.append(None)
else:
offset, size = offset_and_size
vh_list.append((id_num, size))
fwd_ss, rev_ss = part.getStrandSets(id_num)
# for s in fwd_ss:
# print(' VHELIX %d fwd_ss:' % id_num, s)
fwd_idxs, fwd_colors = fwd_ss.dump(xover_list)
rev_idxs, rev_colors = rev_ss.dump(xover_list)
strand_list.append((fwd_idxs, rev_idxs))
# if id_num < 2:
# print( fwd_idxs )
# for s in fwd_ss:
# print( s.insertionsOnStrand() )
## prop_list.append((fwd_colors, rev_colors))
# for s in strand_list:
# print( s )
## Get dictionary of insertions
allInsertions = part.insertions()
## Expand strand_lists for crossover filtering
expandedStrandList = []
for fwdRevStrands in strand_list:
tmp = []
if fwdRevStrands is not None:
for strands in fwdRevStrands:
fwdOrRev = []
for a,b in strands: fwdOrRev.extend(range(a,b+1))
tmp.append(fwdOrRev)
expandedStrandList.append(tmp)
## Find crossovers involving ssDNA and dsDNA
ssXoList, dsXoList, extraInterhelicalBondList = [[],[],[]]
for entry in xover_list:
h1,f1,z1,h2,f2,z2 = entry
if strand_list[h1] is None or strand_list[h2] is None:
print("WARNING: crossover to empty helix")
continue
ds1 = z1 in expandedStrandList[h1][0] and z1 in expandedStrandList[h1][1]
ds2 = z2 in expandedStrandList[h2][0] and z2 in expandedStrandList[h2][1]
occ1Above = z1+1 in expandedStrandList[h1][0] or z1+1 in expandedStrandList[h1][1]
occ2Above = z2+1 in expandedStrandList[h2][0] or z2+1 in expandedStrandList[h2][1]
occ1Below = z1-1 in expandedStrandList[h1][0] or z1-1 in expandedStrandList[h1][1]
occ2Below = z2-1 in expandedStrandList[h2][0] or z2-1 in expandedStrandList[h2][1]
if ((not occ1Above) and (not occ2Below)) or \
((not occ1Below) and (not occ2Above)):
extraInterhelicalBondList.append(entry)
else:
if ds1 and ds2:
dsXoList.append(entry)
else:
ssXoList.append(entry)
## Build dictionary of dsDNA crossovers
xoDicts = [dict() for i in range(numHID)]
for hid1 in range(numHID):
tmp = xoDicts[hid1]
for hid2 in range(numHID):
allXos = {(z1,z2,f1,f2) for h1,f1,z1,h2,f2,z2 in dsXoList if h1 == hid1 and h2 == hid2}
allXos.update( {(z2,z1,f2,f1) for h1,f1,z1,h2,f2,z2 in dsXoList if h2 == hid1 and h1 == hid2} )
allXos = sorted(list(allXos), key = lambda x: (x[0],x[1]))
## Replace each double-crossover with a single one
excludedXos, extraXos = [set(),set()]
for i in range(len(allXos)):
xoi = allXos[i]
for j in range(i+1,len(allXos)):
xoj = allXos[j]
if xoj[0] - xoi[0] > 2: break
if xoi[0]+1 == xoj[0] and xoi[1]+1 == xoj[1] and \
xoi[2] == xoj[2] and xoi[3] == xoj[3]:
excludedXos.add(xoi)
excludedXos.add(xoj)
extraXos.add( (xoi[0]+0.5,xoi[1]+0.5,xoi[2],xoi[3]) )
xos = {xo for xo in allXos if xo not in excludedXos}
xos.update(extraXos)
## Set dictionary entry
xos = list(xos)
if len(xos) > 0:
tmp[hid2] = xos
## Build dictionary of ssDNA crossovers
ssXoDicts = [dict() for i in range(numHID)]
for hid1 in range(numHID):
tmp = ssXoDicts[hid1]
for hid2 in range(numHID):
xos = {(z1,z2,f1,f2) for h1,f1,z1,h2,f2,z2 in ssXoList if h1 == hid1 and h2 == hid2}
xos.update( {(z2,z1,f2,f1) for h1,f1,z1,h2,f2,z2 in ssXoList if h2 == hid1 and h1 == hid2} )
xos = sorted(list(xos), key = lambda x: (x[0],x[1]))
## Set dictionary entry
if len(xos) > 0:
tmp[hid2] = xos
## Build helices
for hid in range(numHID):
# print("Working on helix",hid)
helixStrands = strand_list[hid]
if helixStrands is None:
continue
## Build list of tuples containing (idx,length) of insertions/skips
insertions = sorted( [(i[0],i[1].length()) for i in allInsertions[hid].items()],
key=lambda x: x[0] )
## Build list of strand ends and list of mandatory node locations
ends1,ends2 = self._helixStrandsToEnds(helixStrands)
# xoZids = [x for x in xoDicts2[hid].keys()]
## Find crossovers for this helix
xoZids = [x[1] for h0 in range(hid) if hid in xoDicts[h0] for x in xoDicts[h0][hid]]
xoZids.extend([x[0] for hid2,xos in xoDicts[hid].items() for x in xos])
xoZids.extend([x[1] for h0 in range(hid) if hid in ssXoDicts[h0] for x in ssXoDicts[h0][hid]])
xoZids.extend([x[0] for hid2,xos in ssXoDicts[hid].items() for x in xos])
reqNodeZids = sorted(list(set( ends1 + ends2 + xoZids ) ) )
## Build lists of which nt sites are occupied in the helix
strandOccupancies = [ [x for i in range(0,len(e),2)
for x in range(e[i],e[i+1]+1)]
for e in (ends1,ends2) ]
## Build helix by adding nodes
beadHelix = self.addHelix(part,hid)
if hid in ():
print("%d nodes:" %hid,reqNodeZids)
print("orig xos:",[xo for xo in xover_list if xo[0] == hid or xo[3] == hid])
print("xosZids:", sorted(xoZids) )
print("strandOccupancy1:",strandOccupancies[0])
print("strandOccupancy2:",strandOccupancies[1])
prevNode = None
for i in range( len(reqNodeZids)-1 ):
zid1,zid2 = reqNodeZids[i:i+2]
## Check that there are nts between zid1 and zid2 before adding nodes
zMid = int(0.5*(zid1+zid2))
if zMid in strandOccupancies[0] and zMid in strandOccupancies[1]:
## dsDNA
maxBpsPerNode = maxBpsPerDNode
                elif zMid in strandOccupancies[0] or zMid in strandOccupancies[1]:
## ssDNA
maxBpsPerNode = maxNtsPerSNode
else:
continue
numBps = zid2-zid1
# if numBps < 2:
# print(hid,zid1,zid2)
# assert(numBps >= 1)
for ins_idx,length in insertions:
## TODO: ensure placement of insertions is correct
## (e.g. are insertions at the ends handled correctly?)
if ins_idx < zid1:
continue
if ins_idx >= zid2:
break
numBps += length
# if numBps = 0:
# print("WARNING: found stretch of DNA with 0 length; skipping")
# next
nodesBetween = round( float(numBps-1)/maxBpsPerNode )
if nodesBetween < 0:
nodesBetween = 0
bpsPerNode = float(numBps)/(nodesBetween+1)
if bpsPerNode == 0:
bpsPerNode = 0.1
zidPerNode = float(zid2-zid1)/(nodesBetween+1)
try:
if prevNode is None:
prevNode = beadHelix.addNode( zid1, strandOccupancies )
for i in range(nodesBetween):
node = beadHelix.addNode( zid1+(i+1)*zidPerNode, strandOccupancies )
self._connectNodes(prevNode, node, bpsPerNode)
prevNode = node
node = beadHelix.addNode( zid2, strandOccupancies )
self._connectNodes(prevNode, node, bpsPerNode)
                except:
                    print(hid,zid1,zid2,nodesBetween,bpsPerNode)
                    raise
prevNode = None
if (int(floor(zid2+1)) in strandOccupancies[0]) or \
(int(floor(zid2+1)) in strandOccupancies[1]):
prevNode = node
## Add extra intrahelical bonds
## Add crossovers
for entry in extraInterhelicalBondList:
h1,f1,z1,h2,f2,z2 = entry
n1 = self.helices[h1].nodes[z1]
n2 = self.helices[h2].nodes[z2]
            try:
                self._connectNodes(n1,n2,1)
            except:
                pass            ## nodes may already be connected in this direction
            try:
                self._connectNodes(n2,n1,1)
            except:
                pass
## Add crossovers
for hid1 in range(numHID):
for hid2, xos in xoDicts[hid1].items():
for xo in xos:
self._addCrossover(hid1,hid2,xo)
## Add ssDNA xovers
for hid1 in range(numHID):
for hid2, xos in ssXoDicts[hid1].items():
for xo in xos:
self._addSsCrossover(hid1,hid2,xo)
return
def _connectNodes(self, below, above, sep):
below.addNodeAbove(above, sep)
above.addNodeBelow(below, sep)
def _addCrossover(self, hid1, hid2, xo):
zid1, zid2, isFwd1, isFwd2 = xo
node1 = self.helices[hid1].nodes[zid1]
node2 = self.helices[hid2].nodes[zid2]
## TODO add polarity
polarity = 0
node1.addXover(node2, (isFwd1, isFwd2))
node2.addXover(node1, (isFwd2, isFwd1))
def _addSsCrossover(self, hid1, hid2, xo):
zid1, zid2, isFwd1, isFwd2 = xo
node1 = self.helices[hid1].nodes[zid1]
node2 = self.helices[hid2].nodes[zid2]
## TODO add polarity
polarity = 0
node1.addSsXover(node2, isFwd1)
node2.addSsXover(node1, isFwd2)
def addModel(self, model):
assert( isinstance(model, type(self)) )
hidOffset = max( self.helices.keys() ) + 1
# nidOffset = self.numParticles
for hid,h in model:
self.helices[hid+hidOffset] = h
self.numParticles += model.numParticles
self.buildOrderedParticlesList()
self._setTypes()
self._countParticleTypes()
def backmap(self, simplerModel, simplerModelCoords,
dsDnaHelixNeighborDist=50, dsDnaAllNeighborDist=30,
ssDnaHelixNeighborDist=25, ssDnaAllNeighborDist=25):
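        """Map this model's beads onto the (relaxed) coordinates of a coarser
        model built from the same part.

        Each bead is assigned to the nearest bead of simplerModel along its
        helix; a local rigid-body transform (rotation and translation) is then
        obtained for that bead's neighborhood with minimizeRmsd and applied to
        the bead's initial position.  Cutoffs are in Angstroms."""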
## Assign each bead to a bead in simplerModel
mapToSimplerModel = dict()
cgWeight = dict()
for hDict,cgHDict in zip(self,simplerModel):
assert(hDict[0] == cgHDict[0])
h,cgH = [x[1] for x in (hDict,cgHDict)] # get helix
zIdxs = np.array( sorted([i for i,b in cgH]) )
for i,b in h:
cgi = np.searchsorted(zIdxs,i,side='left',sorter=None)
cgi, = [zIdxs[x] if x < len(zIdxs) else zIdxs[-1] for x in (cgi,)]
mapToSimplerModel[b.idx] = [cgH.nodes[x] for x in (cgi,)]
for i,b in h.orientationNodes.items():
cgi = np.searchsorted(zIdxs,i,side='left',sorter=None)
cgi, = [zIdxs[x] if x < len(zIdxs) else zIdxs[-1] for x in (cgi,)]
mapToSimplerModel[b.idx] = [cgH.nodes[x] for x in (cgi,)]
## Find new axis and position of each bead using neighborhood
beads = [b for h in self for i,b in h[1].nodes.items()]
## Find transformation for each bead of simplerModel
trans = dict()
for b in list(set([b for i,bs in mapToSimplerModel.items() for b in bs])):
helixCutoff = dsDnaHelixNeighborDist if b.type[0] in ('d','O') else ssDnaHelixNeighborDist
allCutoff = dsDnaAllNeighborDist if b.type[0] in ('d','O') else ssDnaAllNeighborDist
ids = []
attempts = 0
while len(ids) <= 3:
                if attempts > 15: raise Exception("Too many attempts to find a neighborhood for backmapping bead %d" % b.idx)
ids = simplerModel._getNeighborhoodIds(b, simplerModelCoords, helixCutoff, allCutoff)
allCutoff *= 1.2
attempts+=1
posOld = np.array( [simplerModel.particles[i][0].initialPosition for i in ids] )
posNew = np.array( [simplerModelCoords[i] for i in ids] )
try:
trans[b.idx] = minimizeRmsd( posOld, posNew )
except:
raise Exception("Failed to find orientation of atom %d in the coarser model" % b.idx)
# print("ugly")
## Optionally smooth orientations
## Apply transformation to each bead of self
beads.extend( [b for h in self for i,b in h[1].orientationNodes.items()] )
for b in beads:
cgb, = mapToSimplerModel[b.idx]
cgi = cgb.idx
r0 = simplerModel.particles[cgi][0].initialPosition
R,c0,c1 = trans[cgi]
b.position = (b.initialPosition - r0).dot(R) + simplerModelCoords[cgi]
assert( np.all(np.isreal( b.position )) )
def _getNeighborhoodIds(self, bead, coords, helixCutoff=50, allCutoff=np.sqrt(35)):
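        """Return indices of beads forming a local neighborhood around bead:
        beads in the same helix within helixCutoff after the simulation (and
        within 100 A initially), plus any bead within allCutoff both before
        and after the simulation."""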
i = bead.idx
coords0 = self.initialCoords
# print(coords0[i,:])
coordsI = np.outer(coords0[i,:],np.ones([len(coords0),1])).T
dr2Initial = np.sum((coords0 - coordsI)**2, axis=-1)
dr2Final = np.sum((coords - coords[i,:])**2, axis=-1)
        ## Include all nodes in the same helix within helixCutoff of the bead after
        ## the simulation (and within 10 nm initially)
        ret = list( np.where( (dr2Final < helixCutoff**2) * (self._nodeHids == bead.helix.hid) * (dr2Initial < 100**2) )[0] )
        ## Include all nodes within allCutoff both before AND after the simulation
        ret.extend( list( np.where( (dr2Final < allCutoff**2) * (dr2Initial < allCutoff**2) )[0] ) )
return sorted(list(set(ret)))
# -------------------------- #
# Methods for querying model #
# -------------------------- #
def _getIntrahelicalNodeSeries(self,seriesLen):
nodeSeries = set()
for hid,hlx in self:
for zid,n in hlx:
nodeList,sepList = n.getNodesAbove(seriesLen-1, inclusive = True)
if len(nodeList) == seriesLen:
nodeList = tuple(nodeList)
sepList = tuple(sepList)
nodeSeries.add( tuple((nodeList,sepList)) )
return nodeSeries
def _getIntrahelicalBonds(self):
return self._getIntrahelicalNodeSeries(2)
def _getIntrahelicalAngles(self):
return self._getIntrahelicalNodeSeries(3)
def _getOrientationBonds(self):
nodeSeries = set()
for hid,hlx in self:
for zid,n in hlx:
if n.orientationNode is not None:
nodeSeries.add( tuple(((n.orientationNode,n),(0.2,))) )
return nodeSeries
def _getOrientationAngles(self):
nodeSeries = set()
for hid,hlx in self:
for zid,n in hlx:
if n.orientationNode is not None and n.nodeAbove is not None:
nodeSeries.add( tuple(((n.orientationNode,n,n.nodeAbove),(0.2, n.nodeAboveSep))) )
return nodeSeries
def _getOrientationDihedrals(self):
nodeSeries = set()
for hid,hlx in self:
for zid,n1 in hlx:
if n1.nodeAbove is not None:
n2 = n1.nodeAbove
if n1.orientationNode is not None and n2.orientationNode is not None:
nodeSeries.add( tuple(((n1.orientationNode,n1,n2,n2.orientationNode),
(0.2, n1.nodeAboveSep, 0.2))) )
return nodeSeries
def _getCrossoverBonds(self):
return { ((n, xo[0]), xo[1])
for hid,hlx in self
for zid,n in hlx for xo in n.xovers if n.idx < xo[0].idx }
def _getSsCrossoverBonds(self):
return { ((n, xo[0]), xo[1])
for hid,hlx in self
for zid,n in hlx for xo in n.ssXovers if n.idx < xo[0].idx }
def _getCrossoverAnglesAndDihedrals(self):
angles,dihedrals = [set(),set()]
contiguousCrossovers = []
for hid,hlx in self.helices.items():
crossovers = []
bpsBetween = 0
for zid,n in hlx:
## Search for contiguous crossovers
if n.nodeBelow is None or n.type[0] != "d":
## Found ssDNA or a gap; reset search
if len(crossovers) > 0:
contiguousCrossovers.append(crossovers)
crossovers = []
bpsBetween = 0
if n.nodeBelow is not None:
bpsBetween += n.nodeBelowSep
if len(n.xovers) > 0:
crossovers.append( (n,bpsBetween) )
if len(crossovers) > 0:
contiguousCrossovers.append(crossovers)
## Process contiguousCrossovers
for crossovers in contiguousCrossovers:
for i in range(len(crossovers)-1):
ni,bpi = crossovers[i]
# for j in range(i+1,len(crossovers)):
for j in range(i+1,i+2): # Just look at adjacent crossovers
assert(j == i+1)
nj,bpj = crossovers[j]
bpsBetween = bpj-bpi
if bpsBetween < 60:
for xo1 in ni.xovers:
for xo2 in nj.xovers:
assert( bpsBetween != 0 )
angles.add( ((xo1[0], ni, nj), bpsBetween) )
angles.add( ((ni, nj, xo2[0]), bpsBetween) )
dihedrals.add( ((xo1[0], ni, nj, xo2[0]), bpsBetween, xo1[1], xo2[1]) )
else:
break
return angles, dihedrals
def _removeIntrahelicalConnectionsAbove(self, cutoff):
bonds = self._getIntrahelicalBonds()
for b in bonds:
n1,n2 = b[0]
r2 = np.sum( (n1.position - n2.position)**2 )
if r2 > cutoff**2:
                if n1.nodeAbove == n2:
                    assert(n2.nodeBelow == n1)
                    n1.nodeAbove = None
                    n2.nodeBelow = None
                elif n2.nodeAbove == n1:
                    assert(n1.nodeBelow == n2)
                    n1.nodeBelow = None
                    n2.nodeAbove = None
                else:
                    raise Exception("Nodes %d and %d are not intrahelical neighbors" % (n1.idx, n2.idx))
def _removeCrossoversAbove(self, cutoff):
# bonds = self._getCrossoverBonds()
for hid,hlx in self:
for zid,n1 in hlx:
newXovers = []
for xo in n1.xovers:
n2 = xo[0]
r2 = np.sum( (n1.position - n2.position)**2 )
if r2 < cutoff**2:
newXovers.append(xo)
n1.xovers = newXovers
# def _getBonds(self):
# bonds = self._getIntrahelicalBonds()
# bonds.update( self._getCrossoverBonds() )
# bonds.update( self._getSsCrossoverBonds() )
# return bonds
# -------------------------- #
    # Methods for printing model  #
# -------------------------- #
def writePdb(self, filename):
with open(filename,'w') as fh:
## Write header
fh.write("CRYST1 1000. 1000. 1000. 90.00 90.00 90.00 P 1 1\n")
## Write coordinates
formatString = "ATOM {:>5d} {:^4s}{:1s}{:3s} {:1s}{:>5s} {:8.3f}{:8.3f}{:8.3f}{:6.2f}{:6.2f}{:2s}{:2f}\n"
for n,hid,zid in self.particles:
## http://www.wwpdb.org/documentation/file-format-content/format33/sect9.html#ATOM
idx = n.idx
name = n.type
resname = name[:3]
chain = "A"
charge = 0
occ = hid
beta = zid
x,y,z = [x for x in n.position]
assert(idx < 1e5)
resid = "{:<4d}".format(idx)
fh.write( formatString.format(
idx, name[:1], "", resname, chain, resid, x, y, z, occ, beta, "", charge ))
return
def writePsf(self, filename):
with open(filename,'w') as fh:
## Write header
fh.write("PSF NAMD\n\n") # create NAMD formatted psf
## ATOMS section
idx=1
for hid,hlx in self:
for x in hlx:
idx += 1
idx += len(hlx.orientationNodes)
# for x in hlx.orientationNodes.items():
# idx += 1
fh.write("{:>8d} !NATOM\n".format(idx-1))
## From vmd/plugins/molfile_plugin/src/psfplugin.c
## "%d %7s %10s %7s %7s %7s %f %f"
formatString = "{idx:>8d} {segname:7s} {resid:<10s} {resname:7s}" + \
" {name:7s} {type:7s} {charge:f} {mass:f}\n"
for n,hid,zid in self.particles:
idx = n.idx + 1
data = dict(
idx = idx,
segname = "A",
resid = "%d%c%c" % (idx," "," "), # TODO: work with large indeces
name = n.type[:1],
resname = n.type[:3],
type = n.type[:1],
charge = 0,
mass = 100,
)
fh.write(formatString.format( **data ))
fh.write("\n")
## Write out bonds
bonds = self.bonds
fh.write("{:>8d} !NBOND\n".format(len(bonds)))
counter = 0
for n1,n2,pot in bonds:
fh.write( "{:d} {:d} ".format(n1.idx+1,n2.idx+1) )
counter += 1
if counter == 3:
fh.write("\n")
counter = 0
fh.write("\n")
return
def writeArbdFiles(self, prefix, numSteps=100000000, timestep=100e-6):
## TODO: save and reference directories and prefixes using member data
d = "potentials"
self._writeArbdCoordFile( prefix + ".coord.txt" )
self._writeArbdBondFile( prefix, directory = d )
self._writeArbdAngleFile( prefix, directory = d )
self._writeArbdDihedralFile( prefix, directory = d )
self._writeArbdExclFile( prefix + ".excludes.txt" )
self._writeArbdPotentialFiles( prefix, directory = d )
self._writeArbdConf( prefix, numSteps, timestep, "%s/%s-" % (d,prefix) )
def _writeArbdCoordFile(self, filename):
with open(filename,'w') as fh:
for n,hid,zid in self.particles:
fh.write("%f %f %f\n" % tuple(x for x in n.position))
def _writeArbdConf(self, prefix, numSteps=100000000, timestep=100e-6, potentialPrefix='' ):
## TODO: raise exception if _writeArbdPotentialFiles has not been called
filename = "%s.bd" % prefix
with open(filename,'w') as fh:
fh.write("""# seed 1234
timestep %f
steps %d
numberFluct 0
interparticleForce 1
fullLongRange 0
temperature 291
electricField 0.0
outputPeriod 1000
outputEnergyPeriod 1000
outputFormat dcd
decompPeriod 50000
cutoff 40.0
pairlistDistance 50
""" % (timestep, numSteps))
for x in self.getParticleTypesAndCounts():
fh.write("\nparticle %s\nnum %d\n" % x)
## TODO: look up better values in dictionary for particle types
fh.write("gridFile null.dx\ndiffusion 150\n")
fh.write("\ninputCoordinates %s.coord.txt\n" % prefix )
if os.path.exists("test.0.restart"):
fh.write("restartCoordinates test.0.restart\n" )
fh.write("""\n## Interaction potentials
tabulatedPotential 1
## The i@j@file syntax means particle type i will have NB interactions with particle type j using the potential in file
""")
for pair,f in zip(self._particleTypePairIter(), self._nbParamFiles):
i,j,t1,t2 = pair
fh.write("tabulatedFile %d@%d@%s\n" % (i,j,f))
fh.write("\n")
for f in list(set([b[-1].filename(potentialPrefix) for b in self.bonds])):
fh.write("tabulatedBondFile %s\n" % f)
fh.write("\n")
for f in list(set([b[-1].filename(potentialPrefix) for b in self.angles])):
fh.write("tabulatedAngleFile %s\n" % f)
fh.write("\n")
for f in list(set([b[-1].filename(potentialPrefix) for b in self.dihedrals])):
fh.write("tabulatedDihedralFile %s\n" % f)
fh.write("""\n## Files that specify connectivity of particles
inputBonds {prefix}.bonds.txt
inputAngles {prefix}.angles.txt
inputDihedrals {prefix}.dihedrals.txt
inputExcludes {prefix}.excludes.txt
""".format( prefix=prefix ))
with open("null.dx",'w') as fh:
fh.write("""object 1 class gridpositions counts 2 2 2
origin -4000.00000 -4000.00000 -4000.00000
delta 8000.00000 0.000000 0.000000
delta 0.000000 8000.00000 0.000000
delta 0.000000 0.000000 8000.00000
object 2 class gridconnections counts 2 2 2
object 3 class array type float rank 0 items 8 data follows
0.0 0.0 0.0
0.0 0.0 0.0
0.0 0.0
attribute "dep" string "positions"
object "density" class field
component "positions" value 1
component "connections" value 2
component "data" value 3
""")
def getParticleTypesAndCounts(self):
return sorted( self.particleTypeCounts.items(), key=lambda x: x[0] )
def _particleTypePairIter(self):
typesAndCounts = self.getParticleTypesAndCounts()
for i in range(len(typesAndCounts)):
t1 = typesAndCounts[i][0]
for j in range(i,len(typesAndCounts)):
t2 = typesAndCounts[j][0]
yield( (i,j,t1,t2) )
def _writeArbdPotentialFiles(self, prefix, directory = "potentials"):
        ## TODO: remove redundant directory calls
try:
os.makedirs(directory)
except OSError:
if not os.path.isdir(directory):
raise
pathPrefix = "%s/%s-" % (directory,prefix)
self._writeNonbondedParameterFiles( pathPrefix + "nb" )
self._writeBondParameterFiles( pathPrefix )
self._writeAngleParameterFiles( pathPrefix )
self._writeDihedralParameterFiles( pathPrefix )
def _writeHarmonicPotentialFile(self, filename, k, x0, resolution=0.1, xmin=0, xmax=35, maxForce=None, periodicity=None):
x = np.arange( xmin, xmax+resolution*2, resolution )
if periodicity is None:
dx = x-x0
else:
dx = np.mod( x-x0 + 0.5*periodicity, periodicity) - 0.5*periodicity
u = 0.5*k*dx**2
if maxForce is not None:
assert(maxForce > 0)
f = np.diff(u)/np.diff(x)
f[f>maxForce] = maxForce
f[f<-maxForce] = -maxForce
u[0] = 0
u[1:] = np.cumsum(f*np.diff(x))
np.savetxt( filename, np.array([x, u]).T, fmt="%f" )
def _writeNonbondedParameterFiles(self, prefix):
x = np.arange(0, 50, 0.1)
for i,j,t1,t2 in self._particleTypePairIter():
f = "%s.%s-%s.dat" % (prefix, t1, t2)
if t1 == "O" or t2 == "O":
y = np.zeros(np.shape(x))
else:
bps1,bps2 = [float( t[1:] )/10 for t in (t1,t2)]
y = nbPot.nbPot(x, bps1, bps2)
np.savetxt( f, np.array([x, y]).T )
self._nbParamFiles.append(f)
def _writeBondParameterFiles(self, prefix):
for pot in list(set([item[-1] for item in self.bonds])):
pot.write_file(prefix)
def _writeAngleParameterFiles(self, prefix):
for pot in list(set([item[-1] for item in self.angles])):
pot.write_file(prefix)
def _writeDihedralParameterFiles(self, prefix):
for pot in list(set([item[-1] for item in self.dihedrals])):
pot.write_file(prefix)
def addBond(self, *args):
self.bonds.add(args)
def addAngle(self, *args):
self.angles.add(args)
def addDihedral(self, *args):
self.dihedrals.add(args)
def _buildBonds(self, prefix, directory="potentials"):
self.bonds = set()
## Get intrahelical bonds
for nodes,seps in self._getIntrahelicalBonds():
n1,n2 = nodes
sep, = seps
if n1.type[0] == "d" and n2.type[0] == "d":
k = 10.0/sqrt(sep) # TODO: determine from simulations
d = 3.4*sep
else:
## TODO: get correct numbers from ssDNA model
k = 1.0/sqrt(sep)
d = 5*sep
self.addBond(n1, n2, Bond(k, d))
## Get crossover bonds
for nodes,fwds in self._getCrossoverBonds():
n1,n2 = nodes
self.addBond(n1, n2, Bond(4, 18.5))
## Get crossover bonds
for nodes,fwds in self._getSsCrossoverBonds():
n1,n2 = nodes
self.addBond(n1, n2, Bond(1, 5))
## Get crossover bonds
for nodes,seps in self._getOrientationBonds():
n1,n2 = nodes
self.addBond(n1, n2, Bond(30, 1)) # TODO: improve params
def _buildAngles(self, prefix, directory="potentials"):
kT = 0.58622522 # kcal/mol
for nodes,seps in self._getIntrahelicalAngles():
n1,n2,n3 = nodes
sep1,sep2 = seps
sep = sep1+sep2
if n1.type[0] == "d" and n2.type[0] == "d" and n3.type[0] == "d":
## <cos(q)> = exp(-s/Lp) = integrate( x^4 exp(-A x^2) / 2, {x, 0, pi} ) / integrate( x^2 exp(-A x^2), {x, 0, pi} )
## <cos(q)> ~ 1 - 3/4A
## where A = k_spring / (2 kT)
k = 1.5 * kT * (1.0 / (1-exp(-float(sep)/147))) * 0.00030461742; # kcal_mol/degree^2
# k *= 5
else:
## TODO: get correct number from ssDNA model
k = 1.5 * kT * (1.0 / (1-exp(-float(sep)/3))) * 0.00030461742; # kcal_mol/degree^2
## Intrahelical 180 deg orientation angles
if None not in [n.orientationNode for n in nodes]:
k *= 0.5 # halve spring constant because using 2 springs
args = [n.orientationNode for n in nodes]
args.append( Angle(k,180) )
self.addAngle( *args )
self.addAngle( n1,n2,n3,Angle(k,180) )
a,d = self._getCrossoverAnglesAndDihedrals()
for nodes,sep in a:
n1,n2,n3 = nodes
k = (1.0/2) * 1.5 * kT * (1.0 / (1-exp(-float(sep)/147))) * 0.00030461742; # kcal_mol/degree^2
self.addAngle( n1,n2,n3,Angle(k,90) )
## Intrahelical 90 deg orientation angles
for nodes,seps in self._getOrientationAngles():
n1,n2,n3 = nodes
sep = np.sum(seps)
k = (1.0/2) * 1.5 * kT * (1.0 / (1-exp(-float(sep)/147))) * 0.00030461742; # kcal_mol/degree^2
self.addAngle( n1,n2,n3,Angle(k,90) )
## Crossover orientation angles
for nodes,fwds in self._getCrossoverBonds():
n1,n2 = nodes
f1,f2 = fwds
o1,o2 = [n.orientationNode for n in nodes]
k = (1.0/2) * 1.5 * kT * (1.0 / (1-exp(-float(1)/147))) * 0.00030461742; # kcal_mol/degree^2
if o1 is not None:
t0 = 90 + 60
if f1: t0 -= 120
self.addAngle( o1,n1,n2,Angle(k,t0) )
if o2 is not None:
t0 = 90 + 60
if f2: t0 -= 120
self.addAngle( n1,n2,o2,Angle(k,t0) )
def _buildDihedrals(self, prefix, directory="potentials"):
kT = 0.58622522 # kcal/mol
a,d = self._getCrossoverAnglesAndDihedrals()
for nodes,sep,isFwd1,isFwd2 in d:
n1,n2,n3,n4 = nodes
## <cos(q)> = exp(-s/Lp) = integrate( cos[x] exp(-A x^2), {x, 0, pi} ) / integrate( exp(-A x^2), {x, 0, pi} )
## Assume A is small
## int[B_] := Normal[Integrate[ Series[Cos[x] Exp[-B x^2], {B, 0, 1}], {x, 0, \[Pi]}]/
## Integrate[Series[Exp[-B x^2], {B, 0, 1}], {x, 0, \[Pi]}]]
## Actually, without assumptions I get fitFun below
## From http://www.annualreviews.org/doi/pdf/10.1146/annurev.bb.17.060188.001405
## units "3e-19 erg cm/ 295 k K" "nm" =~ 73
Lp = self.twistPersistenceLength/0.34 # set semi-arbitrarily as there is a large spread in literature
fitFun = lambda x: np.real(erf( (4*np.pi*x + 1j)/(2*np.sqrt(x)) )) * np.exp(-1/(4*x)) / erf(2*np.sqrt(x)*np.pi) - exp(-sep/Lp)
k = opt.leastsq( fitFun, x0=exp(-sep/Lp) )
k = k[0][0] * 2*kT*0.00030461742
# intrinsicDegrees=30
# fitFun = lambda x: (1.0/(2*x) - 2*np.sqrt(np.pi)*np.exp(-4*np.pi**2*x) / (np.sqrt(x)*erf(2*np.pi*np.sqrt(x))) ) - \
# ( (intrinsicDegrees*np.pi/180)**2 + 2*(1-exp(-sep/Lp)) )
# k = opt.leastsq( fitFun, x0=1/(1-exp(-sep/Lp)) )
# k = k[0][0] * 2*kT*0.00030461742
t0 = sep*(360.0/10.5)
# pdb.set_trace()
if isFwd1[0]: t0 -= 120
if isFwd2[0]: t0 += 120
t0 = t0 % 360
# if n2.idx == 0:
# print( n1.idx,n2.idx,n3.idx,n4.idx,k,t0,sep )
self.addDihedral( n1,n2,n3,n4,Dihedral(k,t0) )
for nodes,seps in self._getOrientationDihedrals():
n1,n2,n3,n4 = nodes
sep = seps[1]
t0 = sep*(360.0/10.5)
Lp = self.twistPersistenceLength/0.34 # set semi-arbitrarily as there is a large spread in literature
fitFun = lambda x: np.real(erf( (4*np.pi*x + 1j)/(2*np.sqrt(x)) )) * np.exp(-1/(4*x)) / erf(2*np.sqrt(x)*np.pi) - exp(-sep/Lp)
k = opt.leastsq( fitFun, x0=exp(-sep/Lp) )
k = k[0][0] * 2*kT*0.00030461742
# k *= 0.1
# k *= 0
self.addDihedral( n1,n2,n3,n4,Dihedral(k,t0,maxPotential=1) )
## Crossover dihedral angles
for nodes,fwds in self._getCrossoverBonds():
n1,n2 = nodes
f1,f2 = fwds
o1,o2 = [n.orientationNode for n in nodes]
a1,a2 = [n.nodeAbove for n in nodes]
b1,b2 = [n.nodeBelow for n in nodes]
k = (1.0/2) * 1.5 * kT * (1.0 / (1-exp(-float(1)/147))) * 0.00030461742; # kcal_mol/degree^2
if o1 is not None:
t0 = 90
# if f1: t0 = -90
if a2 is not None:
self.addDihedral( o1,n1,n2,a2,Dihedral(k,t0) )
if o2 is not None:
t0 = 90
# if f2: t0 = -90
if a1 is not None:
self.addDihedral( o2,n2,n1,a1,Dihedral(k,t0) )
if o1 is not None and o2 is not None:
if a1 is not None and a2 is not None:
t0 = 0
self.addDihedral( a1,n1,n2,a2,Dihedral(k,t0) )
elif b1 is not None and b2 is not None:
t0 = 0
self.addDihedral( b1,n1,n2,b2,Dihedral(k,t0) )
elif b1 is not None and a2 is not None:
t0 = 180
self.addDihedral( b1,n1,n2,a2,Dihedral(k,t0) )
elif a1 is not None and b2 is not None:
t0 = 180
self.addDihedral( a1,n1,n2,b2,Dihedral(k,t0) )
def _writeArbdBondFile(self, prefix, directory="potentials"):
filename = prefix + ".bonds.txt"
prefix = "%s/%s-" % (directory,prefix)
with open(filename,'w') as fh:
for n1,n2,pot in self.bonds:
fh.write("BOND ADD %d %d %s\n" % (n1.idx, n2.idx, pot.filename(prefix)))
def _writeArbdAngleFile(self, prefix, directory="potentials"):
filename = prefix + ".angles.txt"
prefix = "%s/%s-" % (directory,prefix)
with open(filename,'w') as fh:
for n1,n2,n3,pot in self.angles:
fh.write("ANGLE %d %d %d %s\n" % (n1.idx, n2.idx, n3.idx, pot.filename(prefix)))
def _writeArbdDihedralFile(self, prefix, directory="potentials"):
filename = prefix + ".dihedrals.txt"
prefix = "%s/%s-" % (directory,prefix)
with open(filename,'w') as fh:
for n1,n2,n3,n4,pot in self.dihedrals:
fh.write("DIHEDRAL %d %d %d %d %s\n" % (n1.idx, n2.idx, n3.idx, n4.idx, pot.filename(prefix)))
def _writeArbdExclFile(self, filename):
        ## Exclude intrahelical node pairs separated by fewer than e beads
        # e = 4
        e = 8
        exclusions = { (nodes[i],nodes[j])
                       for nodes,seps in self._getIntrahelicalNodeSeries(e)
                       for i in range(e-1)
                       for j in range(i+1,e) }
## TODO, make exclusions depend on distance
## Exclude ssDNA contacts
for nodes,seps in self._getSsCrossoverBonds():
n1,n2 = nodes # recall that nodes is sorted by .idx
exclusions.add( nodes )
exclusions.update( [(n1,n) for n in (n2.nodeBelow,n2.nodeAbove) if n is not None] )
exclusions.update( [(n,n2) for n in (n1.nodeBelow,n1.nodeAbove) if n is not None] )
## Exclude crossovers and nearby
for nodes,fwds in self._getCrossoverBonds():
n1,n2 = nodes # recall that nodes is sorted by .idx
exclusions.add( nodes )
exclusions.update( [(n1,n) for n in (n2.nodeBelow,n2.nodeAbove) if n is not None] )
exclusions.update( [(n,n2) for n in (n1.nodeBelow,n1.nodeAbove) if n is not None] )
## Write exclusions
with open(filename,'w') as fh:
for n1,n2 in exclusions:
fh.write( "EXCLUDE %d %d\n" % (n1.idx,n2.idx) )
def _getNonbondedPotential(self,x,a,b):
return a*(np.exp(-x/b))
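

if __name__ == "__main__":
    ## Minimal sketch of the tabulated-potential machinery: writes a harmonic
    ## bond potential to ./bond-<k>-<r0>.dat using only classes in this file.
    ## Building the full model requires a cadnano part, e.g. (assuming the
    ## cadnano 2.5 Document API; adjust to your install):
    ##   from cadnano.document import Document
    ##   doc = Document(); doc.readFile("design.json")
    ##   model = beadModelTwist( doc.activePart() )
    ##   model.simulate( outputPrefix="design" )
    pot = Bond(k=10.0, r0=3.4, maxForce=5)
    pot.write_file(prefix='./')
    print("Wrote", pot.filename(prefix='./'))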