From 66d0b785a4d260164ef2385b717b51a49722d1cf Mon Sep 17 00:00:00 2001
From: Chris Maffeo <cmaffeo2@illinois.edu>
Date: Thu, 10 Oct 2019 12:12:28 -0500
Subject: [PATCH] Started renaming some keywords

---
 arbdmodel/__init__.py           |  24 ++--
 arbdmodel/interactions.py       |   6 +-
 arbdmodel/polymer.py            | 162 ++++++++++++++++++++++++++++++
 arbdmodel/sali_polymer_model.py | 167 ++++++++++++++++++++++++++++++++
 4 files changed, 345 insertions(+), 14 deletions(-)
 create mode 100644 arbdmodel/polymer.py
 create mode 100644 arbdmodel/sali_polymer_model.py

diff --git a/arbdmodel/__init__.py b/arbdmodel/__init__.py
index 1e396a0..b48e179 100644
--- a/arbdmodel/__init__.py
+++ b/arbdmodel/__init__.py
@@ -676,7 +676,7 @@ class PdbModel(Transformable, Parent):

 class ArbdModel(PdbModel):
     def __init__(self, children, dimensions=(1000,1000,1000), temperature=291, timestep=50e-6, particle_integrator = 'Brown',
-                 cutoff=50, decompPeriod=1000, pairlistDistance=None, nonbondedResolution=0.1,
+                 cutoff=50, decomp_period=1000, pairlist_distance=None, nonbonded_resolution=0.1,
                  remove_duplicate_bonded_terms=True, extra_bd_file_lines=""):
         PdbModel.__init__(self, children, dimensions, remove_duplicate_bonded_terms)
         self.temperature = temperature
@@ -686,11 +686,11 @@ class ArbdModel(PdbModel):

         self.particle_integrator = particle_integrator

-        if pairlistDistance == None:
-            pairlistDistance = cutoff+30
+        if pairlist_distance is None:
+            pairlist_distance = cutoff+30

-        self.decompPeriod = decompPeriod
-        self.pairlistDistance = pairlistDistance
+        self.decomp_period = decomp_period
+        self.pairlist_distance = pairlist_distance

         self.extra_bd_file_lines = extra_bd_file_lines

@@ -752,9 +752,12 @@ class ArbdModel(PdbModel):
         # self.initialCoords = np.array([p.initialPosition for p in self.particles])

     def useNonbondedScheme(self, nbScheme, typeA=None, typeB=None):
-        self.nbSchemes.append( (nbScheme, typeA, typeB) )
+        self.add_nonbonded_scheme(nbScheme, typeA, typeB)
+
+    def add_nonbonded_scheme(self, nonbonded_scheme, typeA=None, typeB=None):
+        self.nbSchemes.append( (nonbonded_scheme, typeA, typeB) )
         if typeA != typeB:
-            self.nbSchemes.append( (nbScheme, typeB, typeA) )
+            self.nbSchemes.append( (nonbonded_scheme, typeB, typeA) )

     def simulate(self, output_name, output_directory='output', num_steps=100000000, timestep=None, gpu=0, output_period=1e4, arbd=None, directory='.', restart_file=None, replicas=1, log_file=None, dry_run = False):
         assert(type(gpu) is int)
@@ -935,7 +938,7 @@ class ArbdModel(PdbModel):
             params['origin'+k] = -v*0.5
             params['dim'+k] = v

-        params['pairlistDistance'] -= params['cutoff']
+        params['pairlist_distance'] -= params['cutoff']

         """ Find rigid groups """
         rigid_groups = []
@@ -975,9 +978,9 @@ outputEnergyPeriod {outputPeriod}
 outputFormat dcd

 ## Infrequent domain decomposition because this kernel is still very slow
-decompPeriod {decompPeriod}
+decompPeriod {decomp_period}
 cutoff {cutoff}
-pairlistDistance {pairlistDistance}
+pairlistDistance {pairlist_distance}

 origin {originX} {originY} {originZ}
 systemSize {dimX} {dimY} {dimZ}
@@ -1324,4 +1327,3 @@ run {num_steps:d}
         self.writePsf( output_name + ".psf" )
         self.write_namd_configuration( output_name, output_directory = output_directory )
         os.sync()
-
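Note on the renamed keyword arguments above: ArbdModel.__init__ has no **kwargs catch-all, so callers that used to pass decompPeriod=, pairlistDistance= or nonbondedResolution= must switch to the snake_case names. A minimal sketch of a call with the new spellings; the empty children list and the numeric values are placeholders rather than anything prescribed by this patch:

    from arbdmodel import ArbdModel

    model = ArbdModel([],                       # children, e.g. a list of Group/PointParticle objects
                      temperature=291,          # K
                      cutoff=50,                # Angstroms
                      decomp_period=1000,       # formerly decompPeriod
                      pairlist_distance=None,   # formerly pairlistDistance; None -> cutoff + 30
                      nonbonded_resolution=0.1) # formerly nonbondedResolution
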
diff --git a/arbdmodel/interactions.py b/arbdmodel/interactions.py
index 8b7ae92..1c4b9b6 100644
--- a/arbdmodel/interactions.py
+++ b/arbdmodel/interactions.py
@@ -25,14 +25,14 @@ class NonbondedScheme():

 class LennardJones(NonbondedScheme):
     def potential(self, r, typeA, typeB):
-        epsilon = sqrt( typeA.epsilon**2 + typeB.epsilon**2 )
+        epsilon = np.sqrt( typeA.epsilon**2 + typeB.epsilon**2 )
         r0 = 0.5 * (typeA.radius + typeB.radius)
         r6 = (r0/r)**6
         r12 = r6**2
         u = epsilon * (r12-2*r6)
         u[0] = u[1]             # Remove NaN
         return u
-LennardJones = LennardJones()
+# LennardJones = LennardJones()

 class HalfHarmonic(NonbondedScheme):
     def potential(self, r, typeA, typeB):
@@ -41,7 +41,7 @@ class HalfHarmonic(NonbondedScheme):
         u = 0.5 * k * (r-r0)**2
         u[r > r0] = np.zeros( np.shape(u[r > r0]) )
         return u
-HalfHarmonic = HalfHarmonic()
+# HalfHarmonic = HalfHarmonic()

 class TabulatedPotential(NonbondedScheme):
     def __init__(self, tableFile, typesA=None, typesB=None, resolution=0.1, rMin=0):
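With the module-level LennardJones = LennardJones() and HalfHarmonic = HalfHarmonic() singletons commented out, callers now instantiate the scheme classes themselves before handing them to add_nonbonded_scheme. A rough sketch of evaluating the Lennard-Jones table for one particle type; the epsilon and radius values are invented for illustration and assume ParticleType stores extra keyword arguments as attributes, as the diffusivity keyword is used elsewhere in this patch:

    import numpy as np
    from arbdmodel import ParticleType
    from arbdmodel.interactions import LennardJones

    lj = LennardJones()                            # explicit instance, since the module-level singleton is gone
    A = ParticleType("A", epsilon=0.2, radius=5.0) # hypothetical LJ parameters (kcal/mol, Angstroms)
    r = np.linspace(0, 50, 501)                    # radial grid in Angstroms; r[0] == 0 gives NaN,
    u = lj.potential(r, A, A)                      # which potential() patches with u[0] = u[1]
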
diff --git a/arbdmodel/polymer.py b/arbdmodel/polymer.py
new file mode 100644
index 0000000..40c684a
--- /dev/null
+++ b/arbdmodel/polymer.py
@@ -0,0 +1,162 @@
+import numpy as np
+from . import PointParticle
+
+class PolymerParticle(PointParticle):
+    def __init__(self, type_, position, name="A", **kwargs):
+        self.name = name
+        self.contour_position = None
+        PointParticle.__init__(self, type_, position, name=name, **kwargs)
+        self.intrahelical_neighbors = []
+        self.other_neighbors = []
+        self.locations = []
+
+    def get_intrahelical_above(self):
+        """ Returns bead directly above self """
+        # assert( len(self.intrahelical_neighbors) <= 2 )
+        for b in self.intrahelical_neighbors:
+            if b.get_contour_position(self.parent, self.contour_position) > self.contour_position:
+                return b
+
+    def get_intrahelical_below(self):
+        """ Returns bead directly below self """
+        # assert( len(self.intrahelical_neighbors) <= 2 )
+        for b in self.intrahelical_neighbors:
+            if b.get_contour_position(self.parent, self.contour_position) < self.contour_position:
+                return b
+
+    def _neighbor_should_be_added(self,b):
+        if type(self.parent) != type(b.parent):
+            return True
+
+        c1 = self.contour_position
+        c2 = b.get_contour_position(self.parent,c1)
+        if c2 < c1:
+            b0 = self.get_intrahelical_below()
+        else:
+            b0 = self.get_intrahelical_above()
+
+        if b0 is not None:
+            c0 = b0.get_contour_position(self.parent,c1)
+            if np.abs(c2-c1) < np.abs(c0-c1):
+                ## remove b0
+                self.intrahelical_neighbors.remove(b0)
+                b0.intrahelical_neighbors.remove(self)
+                return True
+            else:
+                return False
+        return True
+
+    def make_intrahelical_neighbor(self,b):
+        add1 = self._neighbor_should_be_added(b)
+        add2 = b._neighbor_should_be_added(self)
+        if add1 and add2:
+            # assert(len(b.intrahelical_neighbors) <= 1)
+            # assert(len(self.intrahelical_neighbors) <= 1)
+            self.intrahelical_neighbors.append(b)
+            b.intrahelical_neighbors.append(self)
+
+    def conceptual_get_position(self, context):
+        """
+        context: object
+
+Q: does this function do too much?
+Danger of mixing return values
+
+Q: does context describe the system or present an argument?
+        """
+
+        ## Validate Inputs
+        ...
+
+        ## Algorithm
+        """
+context specifies:
+  - kind of output: real space, nt within segment, fraction of segment
+  - absolute or relative
+  - constraints: e.g. if passing through
+        """
+        """
+given context, provide the position
+ input
+"""
+
+    def get_nt_position(self, seg, near_address=None):
+        """ Returns the "address" of the nucleotide relative to seg in
+        nucleotides, taking the shortest (intrahelical) contour length route to seg
+        """
+        if seg == self.parent:
+            pos = self.contour_position
+        else:
+            pos = self.get_contour_position(seg,near_address)
+        return seg.contour_to_nt_pos(pos)
+
+    def get_contour_position(self,seg, address = None):
+        """ TODO: fix paradigm where a bead maps to exactly one location in a polymer
+        - One way: modify get_contour_position to take an optional argument that indicates where in the polymer you are looking from
+        """
+
+        if seg == self.parent:
+            return self.contour_position
+        else:
+            cutoff = 30*3
+            target_seg = seg
+
+            ## depth-first search
+            ## TODO cache distances to nearby locations?
+            def descend_search_tree(seg, contour_in_seg, distance=0, visited_segs=None):
+                nonlocal cutoff
+                if visited_segs is None: visited_segs = []
+
+                if seg == target_seg:
+                    # pdb.set_trace()
+                    ## Found a segment in our target
+                    sign = 1 if contour_in_seg == 1 else -1
+                    if sign == -1: assert( contour_in_seg == 0 )
+                    if distance < cutoff: # TODO: check if this does anything
+                        cutoff = distance
+                    return [[distance, contour_in_seg+sign*seg.nt_pos_to_contour(distance)]], [(seg, contour_in_seg, distance)]
+                if distance > cutoff:
+                    return None,None
+
+                ret_list = []
+                hist_list = []
+                ## Find intrahelical locations in seg that we might pass through
+                conn_locs = seg.get_connections_and_locations("intrahelical")
+                if isinstance(target_seg, SingleStrandedSegment):
+                    tmp = seg.get_connections_and_locations("sscrossover")
+                    conn_locs = conn_locs + list(filter(lambda x: x[2].container == target_seg, tmp))
+                for c,A,B in conn_locs:
+                    if B.container in visited_segs: continue
+                    dx = seg.contour_to_nt_pos( A.address, round_nt=False ) - seg.contour_to_nt_pos( contour_in_seg, round_nt=False)
+                    dx = np.abs(dx)
+                    results,history = descend_search_tree( B.container, B.address,
+                                                           distance+dx, visited_segs + [seg] )
+                    if results is not None:
+                        ret_list.extend( results )
+                        hist_list.extend( history )
+                return ret_list,hist_list
+
+            results,history = descend_search_tree(self.parent, self.contour_position)
+            if results is None or len(results) == 0:
+                raise Exception("Could not find location in segment") # TODO better error
+            if address is not None:
+                return sorted(results,key=lambda x:(x[0],(x[1]-address)**2))[0][1]
+            else:
+                return sorted(results,key=lambda x:x[0])[0][1]
+            # nt_pos = self.get_nt_position(seg)
+            # return seg.nt_pos_to_contour(nt_pos)
+
+    def update_position(self, contour_position):
+        self.contour_position = contour_position
+        self.position = self.parent.contour_to_position(contour_position)
+        if 'orientation_bead' in self.__dict__:
+            o = self.orientation_bead
+            o.contour_position = contour_position
+            orientation = self.parent.contour_to_orientation(contour_position)
+            if orientation is None:
+                print("WARNING: local_twist is True, but orientation is None; using identity")
+                orientation = np.eye(3)
+            o.position = self.position + orientation.dot( np.array((Segment.orientation_bond.r0,0,0)) )
+
+    def __repr__(self):
+        return "<PolymerParticle {} on {}[{:.2f}]>".format( self.name, self.parent, self.contour_position)
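get_contour_position above collects [distance, contour] candidates from the depth-first search and then picks one by sorting, optionally tie-breaking with an address hint (note that Segment and SingleStrandedSegment are referenced by this new module but not yet imported or defined in it). A toy illustration of just that selection step, using made-up numbers instead of real segments:

    # Each candidate: [contour-length distance to the target segment, contour coordinate there]
    results = [[12.0, 0.85], [12.0, 0.10], [30.0, 0.50]]
    address = 0.2

    # With a hint, ties in distance are broken by proximity to the hint ...
    best_with_hint = sorted(results, key=lambda x: (x[0], (x[1]-address)**2))[0][1]  # -> 0.10
    # ... without one, the shortest route simply wins.
    best_shortest = sorted(results, key=lambda x: x[0])[0][1]                        # -> 0.85
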
diff --git a/arbdmodel/sali_polymer_model.py b/arbdmodel/sali_polymer_model.py
new file mode 100644
index 0000000..3956a2e
--- /dev/null
+++ b/arbdmodel/sali_polymer_model.py
@@ -0,0 +1,167 @@
+# -*- coding: utf-8 -*-
+## Test with `python -m arbdmodel.sali_polymer_model`
+
+import numpy as np
+import sys
+
+
+## Local imports
+from . import ArbdModel, ParticleType, PointParticle, Group, get_resource_path
+from .abstract_polymer import PolymerSection, AbstractPolymerGroup
+from .interactions import NonbondedScheme, HarmonicBond, HarmonicPotential
+from .coords import quaternion_to_matrix
+
+
+"""Define particle types"""
+type_ = ParticleType("AA",
+                     # units "297.15 k K / (6 pi 0.92 mPa s 6 AA)" "AA**2/ns"
+                     diffusivity = 39.429314
+)
+
+## Bonded potentials
+class LinearBond(HarmonicBond):
+    def __init__(self, k, r0, rRange=(0,50), resolution=0.1, maxForce=None, max_potential=None, prefix="potentials/"):
+        HarmonicPotential.__init__(self, k, r0, rRange, resolution, maxForce, max_potential, prefix)
+        self.type_ = "linearbond"
+        self.kscale_ = 1.0
+
+    def potential(self, dr):
+        return self.k*np.abs(dr)
+
+class SaliNonbonded(NonbondedScheme):
+    def __init__(self, resolution=0.1, rMin=0):
+        NonbondedScheme.__init__(self, typesA=None, typesB=None, resolution=resolution, rMin=rMin)
+
+    def potential(self, r, typeA, typeB):
+        """ Constant force excluded volume """
+        force = 10 # kcal_mol/AA
+        radius = 6
+
+        u = np.zeros(r.shape)
+        s = r < 2*radius
+        u[s] = (2*radius - r[s]) * force
+        return u
+
+class SaliBeadsFromPolymer(Group):
+    # p = PointParticle(_P, (0,0,0), "P")
+    # b = PointParticle(_B, (3,0,1), "B")
+    # nt = Group( name = "nt", children = [p,b])
+    # nt.add_bond( i=p, j=b, bond = get_resource_path('two_bead_model/BPB.dat') )
+
+    def __init__(self, polymer, sequence=None, **kwargs):
+
+        if sequence is None:
+            raise NotImplementedError
+            # ... set random sequence
+
+        self.polymer = polymer
+        self.sequence = sequence
+
+        for prop in ('segname','chain'):
+            if prop not in kwargs:
+                # import pdb
+                # pdb.set_trace()
+                try:
+                    self.__dict__[prop] = polymer.__dict__[prop]
+                except:
+                    pass
+
+        if len(sequence) != polymer.num_monomers:
+            raise ValueError("Length of sequence does not match length of polymer")
+        Group.__init__(self, **kwargs)
+
+        self.num_beads = int(np.ceil(polymer.num_monomers / 20))
+
+    def _clear_beads(self):
+        ...
+
+    def _generate_beads(self):
+        # beads = self.children
+        nb = self.num_beads
+
+        for i in range(nb):
+            # c = self.polymer.monomer_index_to_contour(i)
+            c = float(i)/(nb-1) if nb > 1 else 0.5
+            r = self.polymer.contour_to_position(c)
+
+            bead = PointParticle(type_, r,
+                                 resid = i+1)
+            self.add(bead)
+
+        ## Two consecutive nts
+        for i in range(len(self.children)-1):
+            b1 = self.children[i]
+            b2 = self.children[i+1]
+            """ units "10 kJ/N_A" kcal_mol """
+            bond = LinearBond(k = 1,
+                              r0 = 18,
+                              rRange = (0,500),
+                              resolution = 0.01,
+                              maxForce = 30)
+            self.add_bond( i=b1, j=b2, bond = bond, exclude=True )
+
+class SaliModel(ArbdModel):
+    def __init__(self, polymers,
+                 sequences = None,
+                 debye_length = 10,
+                 damping_coefficient = 10,
+                 DEBUG=False,
+                 **kwargs):
+
+        """
+        [debye_length]: angstroms
+        [damping_coefficient]: ns
+        """
+
+        print("WARNING: diffusion coefficient arbitrarily set to 100 AA**2/ns in SaliModel")
+
+        kwargs['timestep'] = 1047e-6
+        kwargs['cutoff'] = 18
+
+        if 'decomp_period' not in kwargs:
+            kwargs['decomp_period'] = 1000
+
+        """ Assign sequences """
+        if sequences is None:
+            raise NotImplementedError("SaliModel must be provided a sequences argument")
+
+        self.polymer_group = AbstractPolymerGroup(polymers)
+        self.sequences = sequences
+        ArbdModel.__init__(self, [], **kwargs)
+
+        # """ Update type diffusion coefficients """
+        # self.set_damping_coefficient( damping_coefficient )
+
+        """ Set up nonbonded interactions """
+        nonbonded = SaliNonbonded()
+        self.useNonbondedScheme( nonbonded, typeA=type_, typeB=type_ )
+
+        """ Generate beads """
+        self.generate_beads()
+
+    def update_splines(self, coords):
+        i = 0
+        for p,c in zip(self.polymer_group.polymers,self.children):
+            n = len(c.children)
+            p.set_splines(np.linspace(0,1,n), coords[i:i+n])
+            i += n
+
+        self.clear_all()
+        self.generate_beads()
+        ## TODO Apply restraints, etc
+
+    def generate_beads(self):
+        self.peptides = [SaliBeadsFromPolymer(p,s)
+                         for p,s in zip(self.polymer_group.polymers,self.sequences)]
+
+        for s in self.peptides:
+            self.add(s)
+            s._generate_beads()
+
+    # def set_damping_coefficient(self, damping_coefficient):
+    #     for t in self.types:
+    #         t.damping_coefficient = damping_coefficient
+    #         # t.diffusivity = 831447.2 * temperature / (t.mass * damping_coefficient)
+
+if __name__ == "__main__":
+    pass
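SaliBeadsFromPolymer above coarse-grains roughly 20 monomers per bead, spreads the beads uniformly in contour coordinates, and joins consecutive beads with a constant-force LinearBond of rest length 18 Angstroms. A small standalone sketch of that bookkeeping; the 95-monomer chain length is an arbitrary example, not a value from the patch:

    import numpy as np

    num_monomers = 95
    num_beads = int(np.ceil(num_monomers / 20))   # -> 5 beads for this chain

    # Bead positions in contour coordinates (0..1), or 0.5 for a single-bead chain
    contours = [float(i)/(num_beads-1) if num_beads > 1 else 0.5
                for i in range(num_beads)]        # [0.0, 0.25, 0.5, 0.75, 1.0]

    # LinearBond.potential is V-shaped about r0 = 18 Angstroms: U(dr) = k*|dr|
    k = 1
    dr = np.linspace(-10, 10, 201)                # displacement from r0
    u = k*np.abs(dr)
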
-- 
GitLab
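For completeness, a hedged sketch of evaluating the new excluded-volume scheme on its own, assuming the NonbondedScheme base constructor accepts the keywords used by SaliNonbonded above; the potential ignores the particle types, so None is passed for both:

    import numpy as np
    from arbdmodel.sali_polymer_model import SaliNonbonded

    nb = SaliNonbonded()
    r = np.linspace(0, 30, 301)       # Angstroms
    u = nb.potential(r, None, None)   # 10 kcal/mol/AA * (12 - r) for r < 12 AA, else 0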