Commit 2d028ad: Lint
jakobnissen committed Nov 7, 2023
1 parent b9489ae commit 2d028ad
Showing 7 changed files with 5 additions and 18 deletions.
pyproject.toml (2 changes: 1 addition & 1 deletion)
@@ -34,7 +34,7 @@ requires = ["setuptools ~= 63.0", "Cython ~= 0.29.5"]
 build-backend = "setuptools.build_meta"
 
 [tool.ruff]
-ignore = ["E501"]
+ignore = ["E722", "E501"]
 
 # pyproject.toml
 [tool.pytest.ini_options]
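
For context on the new ignore entry: E722 is pycodestyle's "bare except" rule, and E501 (already ignored) is "line too long". Adding E722 keeps ruff from flagging bare except: clauses in the codebase. A minimal illustration of what the rule catches; risky() and cleanup() are placeholder names:

    try:
        risky()
    except:  # E722: a bare except also catches SystemExit and KeyboardInterrupt
        cleanup()

    # The usual rewrite, when one is wanted, names an exception class instead:
    try:
        risky()
    except Exception:
        cleanup()
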
test/test_encode.py (6 changes: 0 additions & 6 deletions)
@@ -151,12 +151,6 @@ def test_bad_args(self):
         with self.assertRaises(ValueError):
             vamb.encode.VAE(5, dropout=-0.001)
 
-    def test_samples_too_small(self):
-        vae = vamb.encode.VAE(self.rpkm.shape[1])
-        r = self.rpkm.copy()
-        t = self.tnfs.copy()
-        l = self.lens.copy()
-
     def test_loss_falls(self):
         vae = vamb.encode.VAE(self.rpkm.shape[1])
         rpkm_copy = self.rpkm.copy()
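
The deleted test_samples_too_small was an empty stub: it constructed a VAE and copied the fixtures into locals but asserted nothing, so ruff reports each unused copy as F841 (local variable assigned but never used), and the name l additionally trips E741 (ambiguous variable name, easily misread as 1 or I). A minimal illustration of both rules:

    def stub():
        l = [1, 2, 3]   # E741: ambiguous variable name "l"
        total = sum(l)  # F841: "total" is assigned but never used
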
test/test_semisupervised_encode.py (1 change: 0 additions & 1 deletion)
@@ -1,6 +1,5 @@
 import unittest
 import numpy as np
 import torch
-import tempfile
 import io
 
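
An import that is never referenced in the module is ruff's F401 (unused import); deleting the line is the whole fix. A minimal illustration with a placeholder function name:

    import tempfile  # F401: imported but never used below

    def read_all(path):
        with open(path) as file:
            return file.read()
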
vamb/__main__.py (5 changes: 2 additions & 3 deletions)
@@ -16,7 +16,6 @@
 from pathlib import Path
 from collections.abc import Sequence
 from collections import defaultdict
-from functools import reduce
 from torch.utils.data import DataLoader
 import pandas as pd
 
@@ -1080,7 +1079,7 @@ def parse_mmseqs_taxonomy(
 
     if list(df_mmseq[0]) != list(contignames):
         raise AssertionError(
-            f"The contig names of taxonomy entries are not the same as in the contigs metadata"
+            "The contig names of taxonomy entries are not the same as in the contigs metadata"
         )
 
     return graph_column
@@ -1180,7 +1179,7 @@ def predict_taxonomy(
         while table_parent[label] != -1:
             pred_labels.append(table_parent[label])
             label = table_parent[label]
-        pred_labels = ";".join([nodes_ar[l] for l in pred_labels][::-1])
+        pred_labels = ";".join([nodes_ar[label] for label in pred_labels][::-1])
         threshold_mask = (
             predicted_vector[i] > predictor_training_options.softmax_threshold
         )
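
The two non-import changes in this file are mechanical lint fixes: an f-string containing no placeholders (F541) loses its redundant f prefix, and the single-letter comprehension variable l becomes label to satisfy E741. For the f-string rule:

    message = f"no placeholders here"  # F541: f-string without any placeholders
    message = "no placeholders here"   # fixed: a plain string literal
    count = 3
    message = f"{count} items"         # fine: actually interpolates a value
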
vamb/h_loss.py (1 change: 0 additions & 1 deletion)
@@ -18,7 +18,6 @@
 import torch.nn.functional as F
 from torch.utils.data.dataset import TensorDataset as _TensorDataset
 from torch.utils.data import DataLoader as _DataLoader
-from torch.optim import Adam as _Adam
 from torch import Tensor
 
 import vamb.semisupervised_encode as _semisupervised_encode
vamb/parsecontigs.py (2 changes: 1 addition & 1 deletion)
@@ -9,7 +9,7 @@
 import numpy as _np
 import vamb.vambtools as _vambtools
 from collections.abc import Iterable, Sequence
-from typing import IO, Union, TypeVar, Optional, IO
+from typing import IO, Union, TypeVar, Optional
 from pathlib import Path
 import warnings
 
vamb/reclustering.py (6 changes: 1 addition & 5 deletions)
@@ -5,7 +5,6 @@
 
 import os
 import subprocess
-import tempfile
 import sys
 import contextlib
 from typing import IO
@@ -16,6 +15,7 @@
 from sklearn.cluster import DBSCAN
 from sklearn.metrics import pairwise_distances
 import gzip
+import lzma
 
 
 def log(string: str, logfile: IO[str], indent: int = 0):
@@ -92,16 +92,12 @@ def op(f, _):
             return f
 
     elif fname.endswith(".gz"):
-        import gzip
-
         op = gzip.open
     elif fname.endswith(".bz2"):
         import bz2
 
         op = bz2.open
     elif fname.endswith(".xz"):
-        import lzma
-
         op = lzma.open
     else:
         op = open
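
The last hunk drops branch-local imports in favor of the module-level gzip and lzma imports added above (bz2 stays local in this commit). The surrounding code picks an open-compatible callable based on the file extension; here is a minimal self-contained sketch of that pattern, where opener_for is an illustrative name rather than the actual function in vamb/reclustering.py:

    import bz2
    import gzip
    import lzma

    def opener_for(fname: str):
        # Map a filename extension to the matching open function,
        # falling back to the built-in open for uncompressed files.
        if fname.endswith(".gz"):
            return gzip.open
        elif fname.endswith(".bz2"):
            return bz2.open
        elif fname.endswith(".xz"):
            return lzma.open
        return open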
