Skip to content

Commit

Permalink
Add PEP8 compliance check (#16)
Browse files Browse the repository at this point in the history
* Add PEP8 compliance check

* Fix YAML indentation

* Ignore errors caused by tab-indentation

* Exclude ANTLR directory from PEP8 check

* Comply with PEP8

* Avoid violating E712 in overloaded comparison to False in Sugar.py
  • Loading branch information
emileferreira authored Feb 2, 2024
1 parent 6638324 commit dbe50c6
Show file tree
Hide file tree
Showing 10 changed files with 45 additions and 31 deletions.
16 changes: 14 additions & 2 deletions .github/workflows/test-python.yml → .github/workflows/python.yml
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
name: Python tests
name: Python
on: [push, workflow_dispatch, pull_request]
jobs:
build:
test:
runs-on: ubuntu-latest
strategy:
matrix:
Expand All @@ -18,3 +18,15 @@ jobs:
pip install antlr4-python3-runtime==4.9.1
- name: Run all the Python tests
run: python3 tests/test_all.py
pep8:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@master
with:
ref: ${{ github.event.pull_request.head.sha }}
- name: 'Run PEP8 check'
uses: quentinguidee/pep8-action@v1
with:
arguments: >-
--exclude=.svn,CVS,.bzr,.hg,.git,zzantlr
--ignore=E121,E123,E126,E133,E226,E241,E242,E704,W503,W504,W505,W191,E101,E128
4 changes: 2 additions & 2 deletions RASP_support/DrawCompFlow.py
Original file line number Diff line number Diff line change
Expand Up @@ -75,7 +75,7 @@ def makeQKStable(qvars, kvars, select, ref_in_g):
# select has qvars along the rows and kvars along the columns, so we'll do
# the same. i.e. top rows will just be the kvars and first columns will
# just be the qvars.
# if (not qvars) and (not kvars):
# if (not qvars) and (not kvars):
# # no qvars or kvars -> full select -> dont waste space drawing.
# num_rows, num_columns = 0, 0
# pass
Expand Down Expand Up @@ -582,7 +582,7 @@ def draw_comp_flow(self, w, filename=None,
# (though it will not be able to draw computation flows without it)
from graphviz import Digraph
g = Digraph('g')
# with curved lines it fusses over separating score edges
# with curved lines it fusses over separating score edges
# and makes weirdly curved ones that start overlapping with the sequences
# :(
g.attr(splines='polyline')
Expand Down
2 changes: 1 addition & 1 deletion RASP_support/Environment.py
Original file line number Diff line number Diff line change
Expand Up @@ -46,7 +46,7 @@ def carefulcopy(val):
if isinstance(val, Unfinished) or isinstance(val, RASPFunction):
return val # non mutable, at least not through rasp commands
elif isinstance(val, float) or isinstance(val, int) \
or isinstance(val, str) or isinstance(val, bool):
or isinstance(val, str) or isinstance(val, bool):
return val # non mutable
elif isinstance(val, list):
return [carefulcopy(v) for v in val]
Expand Down
6 changes: 3 additions & 3 deletions RASP_support/Evaluator.py
Original file line number Diff line number Diff line change
Expand Up @@ -202,7 +202,7 @@ def _set_iterator_and_vals(self, iterator_names, iterator_vals):
if len(iterator_names) == 1:
self.env.set_variable(iterator_names[0], iterator_vals)
elif isinstance(iterator_vals, Iterable) \
and (len(iterator_vals) == len(iterator_names)):
and (len(iterator_vals) == len(iterator_names)):
for n, v in zip(iterator_names, iterator_vals):
self.env.set_variable(n, v)
else:
Expand Down Expand Up @@ -248,7 +248,7 @@ def _evaluateListComp(self, ast):
for vals in ll:
orig_env = self.env
self.env = self.env.make_nested()
# sets inside the now-nested env -don't want to keep
# sets inside the now-nested env -don't want to keep
# the internal iterators after finishing this list comp
self._set_iterator_and_vals(iterator_names, vals)
res.append(self.evaluateExpr(ast.val))
Expand Down Expand Up @@ -630,7 +630,7 @@ def _test_res(self, res):
def succeeds_with(example):
try:
res(example, just_pass_exception_up=True)
except:
except Exception:
return False
else:
return True
Expand Down
32 changes: 16 additions & 16 deletions RASP_support/FunctionalSupport.py
Original file line number Diff line number Diff line change
Expand Up @@ -90,7 +90,7 @@ def get_parents(self):
for p in other_parents:
# recursion: branch back through all the parents of the unf,
# always stopping wherever hit something 'real' ie a select or
# a sequence
# a sequence
res += p.get_parents()
# nothing is made from more than one select...
assert len(
Expand Down Expand Up @@ -147,7 +147,7 @@ def get_full_parents(self, recurse=False, just_compute=False,
for p in self.get_sorted_full_parents():
p.get_full_parents(recurse=True, just_compute=True)
# have them all compute their full parents so they are
# ready for the future, but only do this in sorted order,
# ready for the future, but only do this in sorted order,
# so recursion is always shallow. (always gets shorted with
# self._full_parents, which is being computed here for each
# unfinished starting from the top of the computation
Expand Down Expand Up @@ -277,12 +277,12 @@ def __init__(self, parents_tuple, parents2self,
from_zipmap=False, output_index=-1,
definitely_uses_identity_function=False):
# min_poss_depth=0 starts all of the base sequences (eg indices) off
# right.
# right.

# might have got none from some default value, fix it before continuing
# because later things eg DrawCompFlow will expect name to be str
if name is None:
name = plain_unfinished_sequence_name
name = plain_unfinished_sequence_name
super(UnfinishedSequence, self).__init__(parents_tuple,
parents2self, name=name,
min_poss_depth=min_poss_depth)
Expand Down Expand Up @@ -441,13 +441,13 @@ def select(q_vars, k_vars, selector, name=None, compare_string=None):
# helpful for the user so consider maybe adding a tiny bit of mess here
# (including markings inside sequences and selectors so they know which
# index they're gathering to and from) to allow it

# we're ok with getting a single q or k var, not in a tuple,
# but important to fix it before '+' on two UnfinishedSequences
# (as opposed to two tuples) sends everything sideways
q_vars = tupleise(q_vars)
k_vars = tupleise(k_vars)

# attn layer is one after values it needs to be calculated
new_depth = _min_poss_depth(q_vars+k_vars)+1
res = UnfinishedSelect((_input, # need input seq length to create select
Expand Down Expand Up @@ -548,19 +548,19 @@ def parents2res(w, vt): return _zipmap(len(w), vt, elementwise_function)
# you can do it in the embedding
# if len(sequences_tuple)>0:
# min_poss_depth = max(min_poss_depth,1) # except for the very specific
# # case where it is the very first thing to be done, in which case we do
# # have to go through one layer to get to the first feedforward.
# # the 'if' is there to rule out increasing when doing a feedforward on
# # nothing, ie, when making a constant. constants are allowed to be
# # created on layer 0, they're part of the embedding or the weights that
# # will use them later or whatever, it's fine
# # case where it is the very first thing to be done, in which case we do
# # have to go through one layer to get to the first feedforward.
# # the 'if' is there to rule out increasing when doing a feedforward on
# # nothing, ie, when making a constant. constants are allowed to be
# # created on layer 0, they're part of the embedding or the weights that
# # will use them later or whatever, it's fine

# at least as deep as needed MVs, but no deeper cause FF
# (which happens at end of layer)
return format_output(parents_tuple, parents2res, name,
min_poss_depth=min_poss_depth,
elementwise_function=elementwise_function,
from_zipmap=True)
from_zipmap=True)


def aggregate(select, sequences_tuple, elementwise_function=None,
Expand All @@ -574,7 +574,7 @@ def aggregate(select, sequences_tuple, elementwise_function=None,
def parents2res(s, vt): return _aggregate(
s, vt, elementwise_function, default=default)
def_uses = definitely_uses_identity_function

# at least as deep as needed attention and at least one deeper than needed
# MVs
return format_output(parents_tuple, parents2res, name,
Expand All @@ -583,7 +583,7 @@ def parents2res(s, vt): return _aggregate(
min_poss_depth=max(_min_poss_depth(
sequences_tuple)+1, select.min_poss_depth),
definitely_uses_identity_function=def_uses)


# up to here was just plain transformer 'assembly'. any addition is a lie
# now begin the bells and whistles
Expand Down
2 changes: 1 addition & 1 deletion RASP_support/REPL.py
Original file line number Diff line number Diff line change
Expand Up @@ -491,7 +491,7 @@ def get_input_tree(self):
if isinstance(newinput, Stop): # input stream ended
return Stop()
if is_comment(newinput):
# don't let comments get in and ruin things somehow
# don't let comments get in and ruin things somehow
newinput = ""
# don't replace newlines here! this is how in-function comments get
# broken
Expand Down
6 changes: 4 additions & 2 deletions RASP_support/Sugar.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@
# top-level rasp file we import, and nice to have draw_comp_flow added into
# the sequences already on load


def _apply_unary_op(self, f):
return zipmap(self, f)

Expand Down Expand Up @@ -69,8 +70,9 @@ def asbool(seq):

def tplnot(seq, name=None):
# this one does correct conversion using asbool and then we really can just
# do ==False
res = asbool(seq) == False
# do == False
pep8hack = False # this avoids violating E712 of PEP8
res = asbool(seq) == pep8hack
return _addname(res, name, "( not " + str(seq.name) + " )")


Expand Down
4 changes: 2 additions & 2 deletions RASP_support/Support.py
Original file line number Diff line number Diff line change
Expand Up @@ -215,10 +215,10 @@ def prep_default(default, num_output_vars):
verify_default_size(default, num_output_vars)
if not isinstance(default, tuple):
# specifically with how we're going to do things here in the
# average aggregate, will help to actually have the outputs get
# average aggregate, will help to actually have the outputs get
# passed around as tuples, even if they're scalars really.
# but do this after the size check for the scalar one so it doesn't
# get filled with weird ifs... this tupled scalar thing is only a
# get filled with weird ifs... this tupled scalar thing is only a
# convenience in this implementation in this here function
default = (default,)
return default
Expand Down
2 changes: 1 addition & 1 deletion RASP_support/analyse.py
Original file line number Diff line number Diff line change
Expand Up @@ -235,7 +235,7 @@ def note_if_seeker(self):
return

if (not self.get_parent_sequences()) \
and (self.get_parent_select() is not None):
and (self.get_parent_select() is not None):
# no parent sequences, but yes parent select: this value is a function
# of only its parent select, i.e., a seeker (marks whether select found
# something or not)
Expand Down
2 changes: 1 addition & 1 deletion RASP_support/make_operators.py
Original file line number Diff line number Diff line change
Expand Up @@ -123,7 +123,7 @@ def __rpow__(self, other):
return apply_binary_op(self, other, lambda a, b: pow(b, a))

# skipping and, or, xor, which are bitwise and dont implement 'and' and
# 'or' but rather & and |.
# 'or' but rather & and |.
# similarly skipping lshift, rshift cause who wants them.
# wish i had not, and, or primitives, but can accept that dont.
# if people really want to do 'not' they can do '==False' instead, can do a
Expand Down

0 comments on commit dbe50c6

Please sign in to comment.