Commit: lint

ncfrey committed May 21, 2024
1 parent b9dab98 · commit a339ebf

Showing 3 changed files with 16 additions and 15 deletions.
pyproject.toml (6 changes: 3 additions & 3 deletions)

@@ -62,14 +62,14 @@ module = [
 ]
 
 [tool.ruff]
-select = [
+lint.select = [
     "E", # pycodestyle errors
     "W", # pycodestyle warnings
     "F", # pyflakes
     "I", # isort
     "B", # flake8-bugbear
 ]
-ignore = [
+lint.ignore = [
     "E501", # line too long, handled by black
     "B008", # do not perform function calls in argument defaults
     "B905", # requires python >= 3.10
@@ -81,7 +81,7 @@ exclude = [
     "tests"
 ]
 
-[tool.ruff.per-file-ignores]
+[tool.ruff.lint.per-file-ignores]
 "__init__.py" = [
     "F401", # MODULE IMPORTED BUT UNUSED
 ]
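Background on this hunk: Ruff moved its linter settings under a lint namespace (the top-level select and ignore keys were deprecated, around Ruff 0.2.0 if memory serves), so select, ignore, and [tool.ruff.per-file-ignores] become lint.select, lint.ignore, and [tool.ruff.lint.per-file-ignores]. File-discovery settings such as exclude stay at the top level, which is why that block is otherwise untouched.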
src/lobster/transforms/_structure.py (5 changes: 2 additions & 3 deletions)

@@ -1,8 +1,8 @@
 import torch
 
+
 def trim_or_pad(tensor: torch.Tensor, pad_to: int, pad_idx: int = 0):
-    """Trim or pad a tensor with shape (L, ...) to a given length.
-    """
+    """Trim or pad a tensor with shape (L, ...) to a given length."""
     L = tensor.shape[0]
     if L >= pad_to:
         # trim, assuming first dimension is the dim to trim
@@ -16,4 +16,3 @@ def trim_or_pad(tensor: torch.Tensor, pad_to: int, pad_idx: int = 0):
     )
     tensor = torch.concat((tensor, padding), dim=0)
     return tensor
-
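A minimal usage sketch of trim_or_pad, assuming only what this file shows (the first dimension is trimmed to pad_to, or padded out with values equal to pad_idx):

import torch

from lobster.transforms import trim_or_pad

x = torch.arange(5)       # shape (5,)
trim_or_pad(x, pad_to=3)  # trimmed: tensor([0, 1, 2])
trim_or_pad(x, pad_to=8)  # padded to length 8 with pad_idx (default 0)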

src/lobster/transforms/_structure_featurizer.py (20 changes: 11 additions & 9 deletions)

@@ -1,6 +1,6 @@
 import typing as T
-from pathlib import Path
 import warnings
+from pathlib import Path
 
 import numpy as np
 import torch
@@ -9,10 +9,10 @@
     OFProtein,
     atom37_to_frames,
     get_backbone_frames,
+    make_atom14_masks,
+    make_atom14_positions,
     make_pdb_features,
     protein_from_pdb_string,
-    make_atom14_masks,
-    make_atom14_positions
 )
 from lobster.transforms import trim_or_pad
@@ -47,7 +47,9 @@ def _openfold_features_from_pdb(
 
         return protein_features
 
-    def _process_structure_features(self, features: T.Dict[str, np.ndarray], seq_len: T.Optional[int] = None):
+    def _process_structure_features(
+        self, features: T.Dict[str, np.ndarray], seq_len: T.Optional[int] = None
+    ):
         """Process feature dtypes and pad to max length for a single sequence."""
         features_requiring_padding = [
             "aatype",
@@ -69,7 +71,7 @@ def _process_structure_features(self, features: T.Dict[str, np.ndarray], seq_len: T.Optional[int] = None):
             features[k] = torch.from_numpy(v)
 
             # Trim or pad to a fixed length for all per-specific features
-            if (k in features_requiring_padding) and (not seq_len is None):
+            if (k in features_requiring_padding) and (seq_len is not None):
                 features[k] = trim_or_pad(features[k], seq_len)
 
         # 'seq_length' is a tensor with shape equal to the aatype array length,
@@ -83,8 +85,8 @@ def _process_structure_features(self, features: T.Dict[str, np.ndarray], seq_len: T.Optional[int] = None):
         features["mask"] = mask.long()
 
         # Make sure input sequence string is also trimmed
-        if not seq_len is None:
-            features['sequence'] = features['sequence'][:seq_len]
+        if seq_len is not None:
+            features["sequence"] = features["sequence"][:seq_len]
 
         features["aatype"] = features["aatype"].argmax(dim=-1)
         return features
@@ -93,11 +95,11 @@ def __call__(self, pdb_str: str, seq_len: int, pdb_id: T.Optional[str] = None):
         with warnings.catch_warnings():
             warnings.simplefilter("ignore")
             features = self._openfold_features_from_pdb(pdb_str, pdb_id)
+
         features = self._process_structure_features(features, seq_len)
         features = atom37_to_frames(features)
         features = get_backbone_frames(features)
         features = make_atom14_masks(features)
         features = make_atom14_positions(features)
-
-        return features
+        return features
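A hypothetical end-to-end call for the featurizer touched above; the class name StructureFeaturizer and its import path are assumptions (the diff only shows the module path and the __call__ signature):

from pathlib import Path

from lobster.transforms import StructureFeaturizer  # assumed name/export

pdb_str = Path("example.pdb").read_text()  # any local PDB file
featurizer = StructureFeaturizer()         # construction details not shown in this diff
features = featurizer(pdb_str, seq_len=512, pdb_id="example")
# Per-residue features are trimmed/padded to seq_len; frames and atom14
# tensors are derived by the openfold utilities chained in __call__.
print(features["aatype"].shape, features["mask"].shape)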
