14 changes: 7 additions & 7 deletions pmda/hbond_analysis.py
@@ -491,7 +491,7 @@ def count_by_time(self):
         indices /= self.step
 
         counts = np.zeros_like(self.frames)
-        counts[indices.astype(np.int)] = tmp_counts
+        counts[indices.astype("int")] = tmp_counts
         return counts
 
     def count_by_type(self):
@@ -511,11 +511,11 @@ def count_by_type(self):
         bond.
         """
         u = self._universe()
-        d = u.atoms[self.hbonds[:, 1].astype(np.int)]
-        a = u.atoms[self.hbonds[:, 3].astype(np.int)]
+        d = u.atoms[self.hbonds[:, 1].astype("int")]
+        a = u.atoms[self.hbonds[:, 3].astype("int")]
 
         tmp_hbonds = np.array([d.resnames, d.types, a.resnames, a.types],
-                              dtype=np.str).T
+                              dtype="str").T
         hbond_type, type_counts = np.unique(tmp_hbonds, axis=0,
                                             return_counts=True)
         hbond_type_list = []
@@ -544,9 +544,9 @@ def count_by_ids(self):
         """
 
         u = self._universe()
-        d = u.atoms[self.hbonds[:, 1].astype(np.int)]
-        h = u.atoms[self.hbonds[:, 2].astype(np.int)]
-        a = u.atoms[self.hbonds[:, 3].astype(np.int)]
+        d = u.atoms[self.hbonds[:, 1].astype("int")]
+        h = u.atoms[self.hbonds[:, 2].astype("int")]
+        a = u.atoms[self.hbonds[:, 3].astype("int")]
 
         tmp_hbonds = np.array([d.ids, h.ids, a.ids]).T
         hbond_ids, ids_counts = np.unique(tmp_hbonds, axis=0,
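A quick aside on why this substitution is safe (illustration only, not part of the diff): NumPy accepts plain type names as strings, and "int" and "str" resolve to the same types the removed aliases pointed at, since np.int and np.str were merely aliases for Python's built-in int and str. The values below are hypothetical stand-ins for the hbond indices and atom types.

import numpy as np

indices = np.array([0.0, 2.0, 4.0])
print(indices.astype("int"))                  # [0 2 4]
print(np.dtype("int") == np.dtype(int))       # True: NumPy's default integer
print(np.array(["ASP", "OD1"], dtype="str"))  # plain unicode string array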
2 changes: 1 addition & 1 deletion pmda/leaflet.py
@@ -145,7 +145,7 @@ def _find_connected_components(self, data, cutoff=15.0):
         res[0] = res[0] + i_index - 1
         res[1] = res[1] - num + j_index - 1
         if res.shape[1] == 0:
-            res = np.zeros((2, 1), dtype=np.int)
+            res = np.zeros((2, 1), dtype="int")
 
         edges = [(res[0, k], res[1, k]) for k in range(0, res.shape[1])]
         graph.add_edges_from(edges)
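One nuance worth noting (not from the PR): np.int was an alias for Python's built-in int, deprecated in NumPy 1.20 and later removed, so the string form is the drop-in replacement. The resulting dtype is NumPy's default integer, which is platform dependent (e.g. int32 on older Windows builds, int64 on most Linux/macOS builds).

import numpy as np

res = np.zeros((2, 1), dtype="int")
print(res.dtype)                    # default integer, e.g. int64 on Linux
print(res.dtype == np.dtype(int))   # True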
6 changes: 3 additions & 3 deletions pmda/rdf.py
@@ -134,7 +134,7 @@ def _single_frame(self, ts, atomgroups):
         count = np.histogram(dist, **self.rdf_settings)[0]
         volume = u.trajectory.ts.volume
 
-        return np.array([count, np.array(volume, dtype=np.float64)])
+        return np.array([count, np.array(volume, dtype="float64")])
 
     def _conclude(self, ):
         self.count = np.sum(self._results[:, 0])
@@ -292,7 +292,7 @@ def _prepare(self):
     def _single_frame(self, ts, atomgroups):
         ags = [[atomgroups[2*i], atomgroups[2*i+1]] for i in range(self.n)]
         count = [np.zeros((ag1.n_atoms, ag2.n_atoms, self.len),
-                          dtype=np.float64) for ag1, ag2 in ags]
+                          dtype="float64") for ag1, ag2 in ags]
         for i, (ag1, ag2) in enumerate(ags):
             u = ag1.universe
             pairs, dist = distances.capped_distance(ag1.positions,
@@ -306,7 +306,7 @@ def _single_frame(self, ts, atomgroups):
 
         volume = u.trajectory.ts.volume
 
-        return np.array([np.array(count), np.array(volume, dtype=np.float64)])
+        return np.array([np.array(count), np.array(volume, dtype="float64")])
 
     def _conclude(self):
         self.count = np.sum(self._results[:, 0])
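For contrast (illustration only): np.float64 is a genuine sized type and was never deprecated, so unlike the np.int/np.str replacements these edits are purely a style change; "float64" is just the equivalent string spelling of the same dtype.

import numpy as np

volume = 125.0  # hypothetical box volume
print(np.array(volume, dtype="float64").dtype)      # float64
print(np.dtype("float64") == np.dtype(np.float64))  # True: identical dtype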
2 changes: 1 addition & 1 deletion pmda/rms/rmsf.py
@@ -214,7 +214,7 @@ def _reduce(res, result_single_frame):
         'sum' action for time series
         """
         atoms = result_single_frame
-        positions = atoms.positions.astype(np.float64)
+        positions = atoms.positions.astype("float64")
         # initial time step case
         if isinstance(res, list) and len(res) == 0:
             # initial mean position = initial position
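A small sketch of why the upcast matters here, assuming (as in MDAnalysis) that AtomGroup.positions are float32: accumulating many float32 frames into running sums loses precision, so the positions are promoted to float64 first. The numbers below are illustrative, not from pmda.

import numpy as np

rng = np.random.default_rng(0)
frames = rng.random((10_000, 3), dtype=np.float32) + 1000.0   # fake coordinates
err = frames.sum(axis=0, dtype="float32") - frames.sum(axis=0, dtype="float64")
print(err)   # typically nonzero: rounding error from float32 accumulation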
8 changes: 4 additions & 4 deletions pmda/test/test_util.py
@@ -146,8 +146,8 @@ def sumofsquares(a):
     sos : array
         `n x m` array of the sum of squares for 'n' atoms
     """
-    dev = a - np.mean(a, axis=0, dtype=np.float64)
-    sos = np.sum(dev**2, axis=0, dtype=np.float64)
+    dev = a - np.mean(a, axis=0, dtype="float64")
+    sos = np.sum(dev**2, axis=0, dtype="float64")
     return sos
 
 
@@ -156,7 +156,7 @@ def pos():
     """Generates array of random positions in range [-100, 100]"""
     return 200*(np.random.random(size=(100000,
                                        1000,
-                                       3)) - 0.5).astype(np.float64)
+                                       3)) - 0.5).astype("float64")
 
 
 @pytest.mark.parametrize('n_frames', [3, 4, 10, 19, 101, 331, 1000])
@@ -197,7 +197,7 @@ def test_fold_second_order_moments(pos, n_frames, n_blocks):
     # slice "trajectory" pos into random length blocks to test more than two
     # cases per iteration
     blocks = [pos[i:j] for i, j in zip(start_indices, stop_indices)]
-    S = [(len(block), block.mean(axis=0, dtype=np.float64),
+    S = [(len(block), block.mean(axis=0, dtype="float64"),
           sumofsquares(block)) for block in blocks]
     # combine block results using fold method
     results = fold_second_order_moments(S)
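A tiny worked example of the sumofsquares helper exercised above (hand-checkable, not part of the test suite): for one atom whose coordinate is 1, 2, 3 over three frames, the mean is 2 and the summed squared deviations are (-1)^2 + 0^2 + 1^2 = 2.

import numpy as np

a = np.array([[[1.0]], [[2.0]], [[3.0]]])        # 3 frames, 1 atom, 1 coordinate
dev = a - np.mean(a, axis=0, dtype="float64")
print(np.sum(dev**2, axis=0, dtype="float64"))   # [[2.]]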
4 changes: 2 additions & 2 deletions pmda/util.py
@@ -166,8 +166,8 @@ def make_balanced_slices(n_frames, n_blocks, start=None, stop=None, step=None):
         # not very useful but allows calling code to work more gracefully
         return []
 
-    bsizes = np.ones(n_blocks, dtype=np.int64) * n_frames // n_blocks
-    bsizes += (np.arange(n_blocks, dtype=np.int64) < n_frames % n_blocks)
+    bsizes = np.ones(n_blocks, dtype="int64") * n_frames // n_blocks
+    bsizes += (np.arange(n_blocks, dtype="int64") < n_frames % n_blocks)
     # This can give a last index that is larger than the real last index;
     # this is not a problem for slicing but it's not pretty.
     # Example: original [0:20:3] -> n_frames=7, start=0, step=3:
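To make the block-size arithmetic above concrete (worked example, not part of the module): with n_frames = 7 and n_blocks = 3, every block starts with 7 // 3 = 2 frames and the first 7 % 3 = 1 block receives one extra frame, giving sizes [3, 2, 2] that sum back to 7.

import numpy as np

n_frames, n_blocks = 7, 3
bsizes = np.ones(n_blocks, dtype="int64") * n_frames // n_blocks
bsizes += (np.arange(n_blocks, dtype="int64") < n_frames % n_blocks)
print(bsizes)        # [3 2 2]
print(bsizes.sum())  # 7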