From 330bd6300fa41071d35e7d123d64a93849a4dde5 Mon Sep 17 00:00:00 2001
From: daniel <1534513+dantp-ai@users.noreply.github.com>
Date: Thu, 29 Feb 2024 23:47:56 +0100
Subject: [PATCH] Ignore F401 for now as they depend on things not implemented
 yet.

---
 minitorch/autodiff.py         |  2 +-
 minitorch/fast_conv.py        |  7 +++----
 minitorch/fast_ops.py         | 10 ++++++----
 minitorch/nn.py               |  2 +-
 minitorch/scalar.py           | 16 ++++++++--------
 minitorch/tensor_ops.py       | 13 +++++++------
 project/interface/mlprimer.py |  2 +-
 project/run_manual.py         |  1 +
 project/run_scalar.py         |  1 +
 tests/test_operators.py       |  2 +-
 10 files changed, 30 insertions(+), 26 deletions(-)

diff --git a/minitorch/autodiff.py b/minitorch/autodiff.py
index 06496e2..a3b02ac 100644
--- a/minitorch/autodiff.py
+++ b/minitorch/autodiff.py
@@ -1,5 +1,5 @@
 from dataclasses import dataclass
-from typing import Any, Iterable, List, Tuple
+from typing import Any, Iterable, List, Tuple  # noqa: F401
 
 from typing_extensions import Protocol
 
diff --git a/minitorch/fast_conv.py b/minitorch/fast_conv.py
index ce4244c..37aa27c 100644
--- a/minitorch/fast_conv.py
+++ b/minitorch/fast_conv.py
@@ -1,13 +1,12 @@
 from typing import Tuple
 
-import numpy as np
-from numba import njit, prange
+import numpy as np  # noqa: F401
+from numba import njit, prange  # noqa: F401
 
 from .autodiff import Context
 from .tensor import Tensor
+from .tensor_data import MAX_DIMS, Index  # noqa: F401
 from .tensor_data import (
-    MAX_DIMS,
-    Index,
     Shape,
     Strides,
     broadcast_index,
diff --git a/minitorch/fast_ops.py b/minitorch/fast_ops.py
index dc73b86..40147d0 100644
--- a/minitorch/fast_ops.py
+++ b/minitorch/fast_ops.py
@@ -2,11 +2,12 @@
 
 from typing import TYPE_CHECKING
 
-import numpy as np
-from numba import njit, prange
+import numpy as np  # noqa: F401
+from numba import njit
+from numba import prange  # noqa: F401
 
+from .tensor_data import MAX_DIMS  # noqa: F401
 from .tensor_data import (
-    MAX_DIMS,
     broadcast_index,
     index_to_position,
     shape_broadcast,
@@ -18,7 +19,8 @@
     from typing import Callable, Optional
 
     from .tensor import Tensor
-    from .tensor_data import Index, Shape, Storage, Strides
+    from .tensor_data import Index  # noqa: F401
+    from .tensor_data import Shape, Storage, Strides
 
 # TIP: Use `NUMBA_DISABLE_JIT=1 pytest tests/ -m task3_1` to run these tests without JIT.
 
diff --git a/minitorch/nn.py b/minitorch/nn.py
index 92c0c8f..a74e367 100644
--- a/minitorch/nn.py
+++ b/minitorch/nn.py
@@ -4,7 +4,7 @@
 from .autodiff import Context
 from .fast_ops import FastOps
 from .tensor import Tensor
-from .tensor_functions import Function, rand, tensor
+from .tensor_functions import Function, rand, tensor  # noqa: F401
 
 
 def tile(input: Tensor, kernel: Tuple[int, int]) -> Tuple[Tensor, int, int]:
diff --git a/minitorch/scalar.py b/minitorch/scalar.py
index 942079d..449ca51 100644
--- a/minitorch/scalar.py
+++ b/minitorch/scalar.py
@@ -6,18 +6,18 @@
 import numpy as np
 
 from .autodiff import Context, Variable, backpropagate, central_difference
+from .scalar_functions import EQ  # noqa: F401
+from .scalar_functions import LT  # noqa: F401
+from .scalar_functions import Add  # noqa: F401
+from .scalar_functions import Exp  # noqa: F401
+from .scalar_functions import Log  # noqa: F401
+from .scalar_functions import Neg  # noqa: F401
+from .scalar_functions import ReLU  # noqa: F401
+from .scalar_functions import Sigmoid  # noqa: F401
 from .scalar_functions import (
-    EQ,
-    LT,
-    Add,
-    Exp,
     Inv,
-    Log,
     Mul,
-    Neg,
-    ReLU,
     ScalarFunction,
-    Sigmoid,
 )
 
 ScalarLike = Union[float, int, "Scalar"]
diff --git a/minitorch/tensor_ops.py b/minitorch/tensor_ops.py
index db82d54..29d92a3 100644
--- a/minitorch/tensor_ops.py
+++ b/minitorch/tensor_ops.py
@@ -2,21 +2,22 @@
 
 from typing import TYPE_CHECKING, Callable, Optional, Type
 
-import numpy as np
+import numpy as np  # noqa: F401
 from typing_extensions import Protocol
 
 from . import operators
+from .tensor_data import MAX_DIMS  # noqa: F401
+from .tensor_data import broadcast_index  # noqa: F401
+from .tensor_data import index_to_position  # noqa: F401
+from .tensor_data import to_index  # noqa: F401
 from .tensor_data import (
-    MAX_DIMS,
-    broadcast_index,
-    index_to_position,
     shape_broadcast,
-    to_index,
 )
 
 if TYPE_CHECKING:
     from .tensor import Tensor
-    from .tensor_data import Index, Shape, Storage, Strides
+    from .tensor_data import Index  # noqa: F401
+    from .tensor_data import Shape, Storage, Strides
 
 
 class MapProto(Protocol):
diff --git a/project/interface/mlprimer.py b/project/interface/mlprimer.py
index 1d98deb..0de73d5 100644
--- a/project/interface/mlprimer.py
+++ b/project/interface/mlprimer.py
@@ -1,11 +1,11 @@
 import random
 
 import chalk as ch
+from chalk import path  # noqa: F401
 from chalk import (
     Trail,
     empty,
     make_path,
-    path,
     place_on_path,
     rectangle,
     unit_x,
diff --git a/project/run_manual.py b/project/run_manual.py
index 302846f..d14c780 100644
--- a/project/run_manual.py
+++ b/project/run_manual.py
@@ -2,6 +2,7 @@
 Be sure you have minitorch installed in you Virtual Env.
 >>> pip install -Ue .
 """
+
 import random
 
 import minitorch
diff --git a/project/run_scalar.py b/project/run_scalar.py
index 4b7ee22..cad6511 100644
--- a/project/run_scalar.py
+++ b/project/run_scalar.py
@@ -2,6 +2,7 @@
 Be sure you have minitorch installed in you Virtual Env.
 >>> pip install -Ue .
 """
+
 import random
 
 import minitorch
diff --git a/tests/test_operators.py b/tests/test_operators.py
index 1069423..a279197 100644
--- a/tests/test_operators.py
+++ b/tests/test_operators.py
@@ -5,6 +5,7 @@
 from hypothesis.strategies import lists
 
 from minitorch import MathTest
+from minitorch.operators import sigmoid  # noqa: F401
 from minitorch.operators import (
     add,
     addLists,
@@ -21,7 +22,6 @@
     prod,
     relu,
     relu_back,
-    sigmoid,
     sum,
 )
 
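Background on the pattern used throughout this patch: F401 is flake8's "imported but unused" check, and a trailing `# noqa: F401` suppresses only that check, and only on that line, so the imports can stay in place until the code that uses them is implemented. A minimal sketch of what the comment does, reusing a line added in tests/test_operators.py above:

    # flake8 would normally report F401 ("'minitorch.operators.sigmoid' imported but unused")
    # on this line; the noqa comment silences F401 here and nowhere else.
    from minitorch.operators import sigmoid  # noqa: F401

If the per-line comments become noisy, an alternative (assuming flake8 reads its configuration from a setup.cfg or .flake8 file in this repo) is a per-file ignore such as `per-file-ignores = minitorch/scalar.py:F401`, which leaves the import lines themselves untouched; this is offered only as an illustrative option, not something the patch does.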