Commit 1eb40b0b authored by cmaffeo2
Browse files

Fixed some small bugs

parent 8cd55d13
......@@ -411,7 +411,7 @@ class PdbModel(Transformable, Parent):
fh.write("CRYST1 {:>5f} {:>5f} {:>5f} 90.00 90.00 90.00 P 1 1\n".format( *self.dimensions ))
## Write coordinates
formatString = "ATOM {:>5d} {:^4s}{:1s}{:3s} {:1s}{:>5s} {:8.3f}{:8.3f}{:8.3f}{:6.2f}{:6.2f}{:2s}{:2f}\n"
formatString = "ATOM {:>5d} {:^4.4s}{:1.1s}{:3.3s} {:1.1s}{:>5.5s} {:8.3f}{:8.3f}{:8.3f}{:6.2f}{:6.2f}{:2.2s}{:2f}\n"
for p in self.particles:
## http://www.wwpdb.org/documentation/file-format-content/format33/sect9.html#ATOM
idx = p.idx+1
......@@ -453,8 +453,8 @@ class PdbModel(Transformable, Parent):
## From vmd/plugins/molfile_plugin/src/psfplugin.c
## "%d %7s %10s %7s %7s %7s %f %f"
formatString = "{idx:>8d} {segname:7s} {resid:<10s} {resname:7s}" + \
" {name:7s} {type:7s} {charge:f} {mass:f}\n"
formatString = "{idx:>8d} {segname:7.7s} {resid:<10.10s} {resname:7.7s}" + \
" {name:7.7s} {type:7.7s} {charge:f} {mass:f}\n"
for p in self.particles:
idx = p.idx + 1
try:
......@@ -714,6 +714,8 @@ class ArbdModel(PdbModel):
for k,v in zip('XYZ', self.dimensions):
params['origin'+k] = -v*0.5
params['dim'+k] = v
params['pairlistDistance'] -= params['cutoff']
## Actually write the file
with open(filename,'w') as fh:
......@@ -742,6 +744,7 @@ systemSize {dimX} {dimY} {dimZ}
## Write entries for each type of particle
for pt,num in self.getParticleTypesAndCounts():
## TODO create new particle types if existing has grid
particleParams = pt.__dict__.copy()
particleParams['num'] = num
fh.write("""
......@@ -750,8 +753,14 @@ num {num}
diffusion {diffusivity}
""".format(**particleParams))
if 'grid' in particleParams:
for g in pt['grid']:
if not isinstance(pt.grid, list): pt.grid = [pt.grid]
for g in pt.grid:
fh.write("gridFile {}\n".format(g))
## TODO: make this prettier? multiple scaling factors?
gridFileScale = 1.0
if 'gridFileScale' in pt.__dict__: gridFileScale = pt.gridFileScale
fh.write("gridFileScale {}\n".format(gridFileScale))
else:
fh.write("gridFile null.dx\n")
......
......@@ -18,6 +18,7 @@ def maxForce(x,u,maxForce=40):
f[ids] = -maxForce
u = np.hstack( ((0), np.cumsum(-f*dx)) )
assert( np.any( x > 40 ) )
ids = np.where(x > 40)[0]
u = u - np.mean(u[ids])
......
......@@ -191,14 +191,14 @@ class SegmentParticle(PointParticle):
if seg == self.parent:
return seg.contour_to_nt_pos(self.contour_position)
else:
cl = [e for e in self.parent.get_connections_and_locations() in B.container is seg]
dc = [(self.contour_position - A.address)**2 for c,A,B in e]
cl = [e for e in self.parent.get_connections_and_locations() if e[2].container is seg]
dc = [(self.contour_position - A.address)**2 for c,A,B in cl]
if len(dc) == 0:
pdb.set_trace()
i = np.argmin(dc)
c,A,B = cl[i]
## TODO: generalize, removing np.abs and conditional
delta_nt = np.abs( A.container.contour_to_nt_pos(self.contour_position - A.address) )
B_nt_pos = seg.contour_to_nt_pos(B.address)
......@@ -318,9 +318,9 @@ class Segment(ConnectableElement, Group):
axis = self.contour_to_tangent(s)
orientation = rotationAboutAxis( axis, self.twist_per_nt*self.contour_to_nt_pos(s), normalizeAxis=True )
def get_contour_sorted_connections_and_locations(self):
def get_contour_sorted_connections_and_locations(self,type_):
sort_fn = lambda c: c[1].address
cl = self.get_connections_and_locations()
cl = self.get_connections_and_locations(type_)
return sorted(cl, key=sort_fn)
def randomize_unset_sequence(self):
......@@ -778,6 +778,9 @@ class DoubleStrandedSegment(Segment):
pos = self.contour_to_position(contour_position)
if self.local_twist:
orientation = self.contour_to_orientation(contour_position)
if orientation is None:
print("WARNING: local_twist is True, but orientation is None; using identity")
orientation = np.eye(3)
opos = pos + orientation.dot( np.array((Segment.orientation_bond.r0,0,0)) )
o = SegmentParticle( Segment.orientation_particle, opos, nts,
num_nts=nts, parent=self )
......@@ -998,7 +1001,7 @@ class SegmentModel(ArbdModel):
# self.max_nucleotides_per_bead = max_nucleotides_per_bead # ssDNA
self.children = self.segments = segments
self._bonded_potential = dict() # cache bonded potentials
self._bonded_potential = dict() # cache for bonded potentials
self._generate_bead_model( max_basepairs_per_bead, max_nucleotides_per_bead, local_twist, escapable_twist)
......@@ -1116,14 +1119,18 @@ class SegmentModel(ArbdModel):
for s in self.segments:
cabs = s.get_connections_and_locations("intrahelical")
if np.any( [B.particle is None for c,A,B in cabs] ):
print( "WARNING: none type found in connection, skipping" )
cabs = [e for e in cabs if e[2].particle is not None]
beads = list(set(s.beads + [A.particle for c,A,B in cabs]))
## Add nearby beads
for c,A,B in cabs:
## TODOTODO test?
bs = filter( B.particle.intrahelical_neighbors, lambda x: x is not None and x not in beads )
filter_fn = lambda x: x is not None and x not in beads
bs = list( filter( filter_fn, B.particle.intrahelical_neighbors ) )
beads.extend(bs)
bs = filter( bs.intrahelical_neighbors, lambda x: x is not None and x not in beads )
bs = list( filter( filter_fn, [n for b in bs for n in b.intrahelical_neighbors] ) )
beads.extend(bs)
......@@ -1438,7 +1445,8 @@ class SegmentModel(ArbdModel):
twist_per_nt = 0.5 * (p1.twist_per_nt + p2.twist_per_nt)
angle = sep*twist_per_nt
if angle > 360 or angle < -360:
raise Exception("The twist between beads is too large")
print("WARNING: twist angle out of normal range... proceeding anyway")
# raise Exception("The twist between beads is too large")
k = self._get_twist_spring_constant(sep)
if escapable_twist:
......@@ -1543,9 +1551,7 @@ class SegmentModel(ArbdModel):
for s in self.segments:
for cl in s.get_contour_sorted_connections_and_locations("crossover"):
...
def _generate_strands(self):
self.strands = strands = []
......
Supports Markdown
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment