Commit
Ignore F401 for now as they depend on things not implemented yet.
dantp-ai committed Feb 29, 2024
1 parent a136224 commit 330bd63
Showing 10 changed files with 30 additions and 26 deletions.
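
F401 is flake8's "imported but unused" check. Rather than deleting imports whose call sites simply are not written yet, this commit silences the check line by line with `# noqa: F401` comments, which can be removed once the implementations land (running flake8 with `--disable-noqa` later will surface suppressions that are no longer needed). A minimal sketch of the pattern; the module and names below are purely illustrative, not code from this repository:

# `tau` is not referenced anywhere yet, so flake8 would normally report
# "F401 'math.tau' imported but unused" for this line; the noqa comment
# suppresses exactly that check on exactly this line.
from math import tau  # noqa: F401


def circumference(radius: float) -> float:
    # Placeholder: the eventual implementation will return tau * radius,
    # at which point the noqa comment above can be deleted.
    raise NotImplementedError("not implemented yet")
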
2 changes: 1 addition & 1 deletion minitorch/autodiff.py
@@ -1,5 +1,5 @@
 from dataclasses import dataclass
-from typing import Any, Iterable, List, Tuple
+from typing import Any, Iterable, List, Tuple  # noqa: F401

 from typing_extensions import Protocol
7 changes: 3 additions & 4 deletions minitorch/fast_conv.py
@@ -1,13 +1,12 @@
 from typing import Tuple

-import numpy as np
-from numba import njit, prange
+import numpy as np  # noqa: F401
+from numba import njit, prange  # noqa: F401

 from .autodiff import Context
 from .tensor import Tensor
+from .tensor_data import MAX_DIMS, Index  # noqa: F401
 from .tensor_data import (
-    MAX_DIMS,
-    Index,
     Shape,
     Strides,
     broadcast_index,
10 changes: 6 additions & 4 deletions minitorch/fast_ops.py
@@ -2,11 +2,12 @@

 from typing import TYPE_CHECKING

-import numpy as np
-from numba import njit, prange
+import numpy as np  # noqa: F401
+from numba import njit
+from numba import prange  # noqa: F401

+from .tensor_data import MAX_DIMS  # noqa: F401
 from .tensor_data import (
-    MAX_DIMS,
     broadcast_index,
     index_to_position,
     shape_broadcast,
@@ -18,7 +19,8 @@
     from typing import Callable, Optional

     from .tensor import Tensor
-    from .tensor_data import Index, Shape, Storage, Strides
+    from .tensor_data import Index  # noqa: F401
+    from .tensor_data import Shape, Storage, Strides

 # TIP: Use `NUMBA_DISABLE_JIT=1 pytest tests/ -m task3_1` to run these tests without JIT.
2 changes: 1 addition & 1 deletion minitorch/nn.py
@@ -4,7 +4,7 @@
 from .autodiff import Context
 from .fast_ops import FastOps
 from .tensor import Tensor
-from .tensor_functions import Function, rand, tensor
+from .tensor_functions import Function, rand, tensor  # noqa: F401


 def tile(input: Tensor, kernel: Tuple[int, int]) -> Tuple[Tensor, int, int]:
16 changes: 8 additions & 8 deletions minitorch/scalar.py
@@ -6,18 +6,18 @@
 import numpy as np

 from .autodiff import Context, Variable, backpropagate, central_difference
+from .scalar_functions import EQ  # noqa: F401
+from .scalar_functions import LT  # noqa: F401
+from .scalar_functions import Add  # noqa: F401
+from .scalar_functions import Exp  # noqa: F401
+from .scalar_functions import Log  # noqa: F401
+from .scalar_functions import Neg  # noqa: F401
+from .scalar_functions import ReLU  # noqa: F401
+from .scalar_functions import Sigmoid  # noqa: F401
 from .scalar_functions import (
-    EQ,
-    LT,
-    Add,
-    Exp,
     Inv,
-    Log,
     Mul,
-    Neg,
-    ReLU,
     ScalarFunction,
-    Sigmoid,
 )

 ScalarLike = Union[float, int, "Scalar"]
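
The scalar.py hunk above also illustrates why several grouped imports in this commit are split into one-name-per-line imports: a `# noqa` comment applies to a single physical line, so giving each temporarily unused name its own import line keeps the suppression scoped to that name alone, while the names that are genuinely used (Inv, Mul, ScalarFunction) stay in the parenthesized import and remain fully linted. A small sketch of the same idea with standard-library names, purely illustrative rather than repository code:

# `getcwd` is not used yet (a later helper will need it), so its import gets a
# targeted suppression; `path` is used below and is still checked normally.
from os import getcwd  # noqa: F401
from os import path


def raw_data_dir() -> str:
    # Only `path` is exercised here; no blanket suppression hides real issues.
    return path.join("data", "raw")
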
13 changes: 7 additions & 6 deletions minitorch/tensor_ops.py
@@ -2,21 +2,22 @@

 from typing import TYPE_CHECKING, Callable, Optional, Type

-import numpy as np
+import numpy as np  # noqa: F401
 from typing_extensions import Protocol

 from . import operators
+from .tensor_data import MAX_DIMS  # noqa: F401
+from .tensor_data import broadcast_index  # noqa: F401
+from .tensor_data import index_to_position  # noqa: F401
+from .tensor_data import to_index  # noqa: F401
 from .tensor_data import (
-    MAX_DIMS,
-    broadcast_index,
-    index_to_position,
     shape_broadcast,
-    to_index,
 )

 if TYPE_CHECKING:
     from .tensor import Tensor
-    from .tensor_data import Index, Shape, Storage, Strides
+    from .tensor_data import Index  # noqa: F401
+    from .tensor_data import Shape, Storage, Strides


 class MapProto(Protocol):
2 changes: 1 addition & 1 deletion project/interface/mlprimer.py
@@ -1,11 +1,11 @@
 import random

 import chalk as ch
+from chalk import path  # noqa: F401
 from chalk import (
     Trail,
     empty,
     make_path,
-    path,
     place_on_path,
     rectangle,
     unit_x,
1 change: 1 addition & 0 deletions project/run_manual.py
@@ -2,6 +2,7 @@
 Be sure you have minitorch installed in you Virtual Env.
 >>> pip install -Ue .
 """
+
 import random

 import minitorch
1 change: 1 addition & 0 deletions project/run_scalar.py
@@ -2,6 +2,7 @@
 Be sure you have minitorch installed in you Virtual Env.
 >>> pip install -Ue .
 """
+
 import random

 import minitorch
2 changes: 1 addition & 1 deletion tests/test_operators.py
@@ -5,6 +5,7 @@
 from hypothesis.strategies import lists

 from minitorch import MathTest
+from minitorch.operators import sigmoid  # noqa: F401
 from minitorch.operators import (
     add,
     addLists,
@@ -21,7 +22,6 @@
     prod,
     relu,
     relu_back,
-    sigmoid,
     sum,
 )
