Skip to content

Commit

Permalink
Fix TypeError when reading S3 objects (BytesIO was given a generator) and update the test mock to match:
Browse files Browse the repository at this point in the history
Traceback (most recent call last):
  File "/home/derrick/src/aws-log-parser/examples/count-hosts.py", line 103, in <module>
    main()
  File "/home/derrick/src/aws-log-parser/examples/count-hosts.py", line 100, in main
    count_ips(entries, ip_attr)
  File "/home/derrick/src/aws-log-parser/examples/count-hosts.py", line 13, in count_ips
    counter = Counter(attrgetter(ip_attr)(entry) for entry in entries)
  File "/home/derrick/.pyenv/versions/3.10.9/lib/python3.10/collections/__init__.py", line 577, in __init__
    self.update(iterable, **kwds)
  File "/home/derrick/.pyenv/versions/3.10.9/lib/python3.10/collections/__init__.py", line 670, in update
    _count_elements(self, iterable)
  File "/home/derrick/src/aws-log-parser/examples/count-hosts.py", line 13, in <genexpr>
    counter = Counter(attrgetter(ip_attr)(entry) for entry in entries)
  File "/home/derrick/src/aws-log-parser/aws_log_parser/interface.py", line 180, in read_url
    yield from self.read_s3(
  File "/home/derrick/src/aws-log-parser/aws_log_parser/interface.py", line 145, in read_s3
    yield from self.parse(
  File "/home/derrick/src/aws-log-parser/aws_log_parser/interface.py", line 92, in parse
    yield from log_entries
  File "/home/derrick/src/aws-log-parser/aws_log_parser/interface.py", line 70, in parse_csv
    for row in csv.reader(content, delimiter=self.log_type.delimiter):
  File "/home/derrick/src/aws-log-parser/aws_log_parser/aws/s3.py", line 49, in read_keys
    yield from self.read_key(bucket, file["Key"])
  File "/home/derrick/src/aws-log-parser/aws_log_parser/aws/s3.py", line 36, in read_key
    fileobj=BytesIO(contents["Body"].iter_lines()),
TypeError: a bytes-like object is required, not 'generator'
  • Loading branch information
dpetzold committed Jan 28, 2025
1 parent cec5b22 commit 4438e65
Show file tree
Hide file tree
Showing 2 changed files with 7 additions and 8 deletions.
2 changes: 1 addition & 1 deletion aws_log_parser/aws/s3.py
Original file line number Diff line number Diff line change
def read_key(self, bucket, key):
    """Yield parsed lines from the S3 object ``s3://{bucket}/{key}``.

    ``StreamingBody.read()`` returns the whole payload as ``bytes``, which
    ``BytesIO`` requires; the previous ``iter_lines()`` call returned a
    generator and raised ``TypeError: a bytes-like object is required``.

    NOTE(review): this buffers the entire object in memory — presumably
    acceptable for log-file sizes; confirm for very large keys.
    """
    print(f"Reading s3://{bucket}/{key}")
    contents = self.client.get_object(Bucket=bucket, Key=key)
    yield from FileIterator(
        fileobj=BytesIO(contents["Body"].read()),
        # Gzip-compressed keys are transparently decompressed downstream.
        gzipped=key.endswith(".gz"),
    )

Expand Down
13 changes: 6 additions & 7 deletions test/test_interface.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,9 +24,7 @@ def paginate(self, **_):
"Contents": [
{
"Key": f"cloudfront-multiple.log{suffix}",
"LastModified": datetime.datetime(
2021, 11, 28, 3, 31, 56, tzinfo=tzutc()
),
"LastModified": datetime.datetime(2021, 11, 28, 3, 31, 56, tzinfo=tzutc()),
"ETag": '"37c13f9a66a79c2b474356adaf5da1d0"',
"Size": 2844,
"StorageClass": "STANDARD",
Expand All @@ -39,9 +37,12 @@ def paginate(self, **_):
class MockStreamingFile:
    """Stand-in for botocore's StreamingBody, backed by a local fixture file.

    Exposes the two accessors the parser may call on an S3 response body:
    ``read`` (entire payload as bytes) and ``iter_lines`` (raw lines with
    their trailing newlines kept).
    """

    # Path of the local file standing in for the S3 object body.
    filename: str

    def read(self):
        """Return the whole file as bytes, like ``StreamingBody.read()``."""
        # Use a context manager so the handle is closed deterministically
        # instead of being leaked until GC.
        with open(self.filename, "rb") as fh:
            return fh.read()

    def iter_lines(self):
        """Yield the file's raw lines (newlines included)."""
        # Stream lazily rather than materializing a throwaway list first.
        with open(self.filename, "rb") as fh:
            yield from fh


@dataclass
class MockS3Client:
Expand Down Expand Up @@ -104,9 +105,7 @@ def test_parse_s3_gzipped(monkeypatch, cloudfront_parser):

def test_parse_url_s3(monkeypatch, cloudfront_parser):
    """read_url() on an s3:// URL parses all six entries of the fixture log."""
    # Replace the real boto3 client with the local-file mock for the test.
    monkeypatch.setattr(S3Service, "client", MockS3Client())
    entries = cloudfront_parser.read_url("s3://aws-logs-test-data/cloudfront-multiple.log")
    # Force the lazy generator so parsing actually runs.
    assert len(list(entries)) == 6


Expand Down

0 comments on commit 4438e65

Please sign in to comment.