
Commit

sync
krrish-sehgal committed Jan 26, 2025
2 parents d36ab7d + 13da0aa commit 02cd16d
Showing 83 changed files with 1,519 additions and 1,326 deletions.
8 changes: 2 additions & 6 deletions blt/middleware/ip_restrict.py
@@ -87,9 +87,7 @@ def increment_block_count(self, ip=None, network=None, user_agent=None):
         if ip:
             blocked_entry = Blocked.objects.select_for_update().filter(address=ip).first()
         elif network:
-            blocked_entry = (
-                Blocked.objects.select_for_update().filter(ip_network=network).first()
-            )
+            blocked_entry = Blocked.objects.select_for_update().filter(ip_network=network).first()
         elif user_agent:
             # Correct lookup: find if any user_agent_string is a substring of the user_agent
             blocked_entry = (
@@ -111,9 +109,7 @@ def increment_block_count(self, ip=None, network=None, user_agent=None):
             blocked_entry.save(update_fields=["count"])

     def __call__(self, request):
-        ip = request.META.get("HTTP_X_FORWARDED_FOR", "").split(",")[0].strip() or request.META.get(
-            "REMOTE_ADDR", ""
-        )
+        ip = request.META.get("HTTP_X_FORWARDED_FOR", "").split(",")[0].strip() or request.META.get("REMOTE_ADDR", "")
         agent = request.META.get("HTTP_USER_AGENT", "").strip()

         blocked_ips = self.blocked_ips()
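Note on the __call__ hunk: the middleware derives the client IP from the first X-Forwarded-For hop and falls back to REMOTE_ADDR. A standalone sketch of that extraction, assuming a Django-style META dict (the helper name is ours, not from this repo):

    def get_client_ip(meta):
        # X-Forwarded-For is a comma-separated proxy chain; the left-most
        # entry is the original client. Trust it only behind a proxy you
        # control, since clients can forge the header.
        first_hop = meta.get("HTTP_X_FORWARDED_FOR", "").split(",")[0].strip()
        return first_hop or meta.get("REMOTE_ADDR", "")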
4 changes: 1 addition & 3 deletions blt/settings.py
@@ -244,9 +244,7 @@
 if not GOOGLE_CREDENTIALS:
     raise Exception("GOOGLE_CREDENTIALS environment variable is not set.")

-GS_CREDENTIALS = service_account.Credentials.from_service_account_info(
-    json.loads(GOOGLE_CREDENTIALS)
-)
+GS_CREDENTIALS = service_account.Credentials.from_service_account_info(json.loads(GOOGLE_CREDENTIALS))

 STORAGES = {
     "default": {
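The settings hunk keeps the entire service-account key JSON in a single environment variable instead of a key file. A minimal sketch of the same pattern outside Django, assuming google-auth is installed (variable names are ours):

    import json
    import os

    from google.oauth2 import service_account

    # The full key-file JSON lives in one env var.
    raw_key = os.environ.get("GOOGLE_CREDENTIALS")
    if not raw_key:
        raise RuntimeError("GOOGLE_CREDENTIALS environment variable is not set.")

    credentials = service_account.Credentials.from_service_account_info(json.loads(raw_key))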
4 changes: 1 addition & 3 deletions comments/migrations/0001_initial.py
@@ -19,9 +19,7 @@ class Migration(migrations.Migration):
             fields=[
                 (
                     "id",
-                    models.AutoField(
-                        auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
-                    ),
+                    models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID"),
                 ),
                 ("author", models.CharField(max_length=200)),
                 ("author_url", models.CharField(max_length=200)),
4 changes: 1 addition & 3 deletions comments/migrations/0002_comment_parentid.py
@@ -14,8 +14,6 @@ class Migration(migrations.Migration):
         migrations.AddField(
             model_name="comment",
             name="parentId",
-            field=models.ForeignKey(
-                null=True, on_delete=django.db.models.deletion.CASCADE, to="comments.Comment"
-            ),
+            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to="comments.Comment"),
         ),
     ]
4 changes: 1 addition & 3 deletions comments/migrations/0005_auto_20170727_1309.py
@@ -14,8 +14,6 @@ class Migration(migrations.Migration):
         migrations.AlterField(
             model_name="comment",
             name="parent",
-            field=models.ForeignKey(
-                null=True, on_delete=django.db.models.deletion.CASCADE, to="comments.Comment"
-            ),
+            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to="comments.Comment"),
         ),
     ]
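Both migration hunks reformat a self-referential ForeignKey that backs threaded comments. Roughly, the resulting model looks like this (our reconstruction from the migrations; field list abridged):

    from django.db import models

    class Comment(models.Model):
        author = models.CharField(max_length=200)
        author_url = models.CharField(max_length=200)
        # A reply points at its parent comment; top-level comments keep
        # this NULL. CASCADE removes replies when the parent is deleted.
        parent = models.ForeignKey("self", null=True, on_delete=models.CASCADE)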
12 changes: 3 additions & 9 deletions comments/views.py
@@ -128,9 +128,7 @@ def reply_comment(request, pk):
     issue = Issue.objects.get(pk=request.GET["issue_pk"])
     reply_text = request.GET.get("text_comment")
     reply_text = escape(reply_text)
-    comment = Comment(
-        author=author, author_url=author_url, issue=issue, text=reply_text, parent=parent_obj
-    )
+    comment = Comment(author=author, author_url=author_url, issue=issue, text=reply_text, parent=parent_obj)
     comment.save()
     all_comment = Comment.objects.filter(issue=issue)
     return render(
@@ -145,15 +143,11 @@ def autocomplete(request):
     q_string = request.GET.get("search", "")
     q_string = escape(q_string)
     if len(q_string) == 0:
-        return HttpResponse(
-            request.GET["callback"] + "(" + json.dumps([]) + ");", content_type="application/json"
-        )
+        return HttpResponse(request.GET["callback"] + "(" + json.dumps([]) + ");", content_type="application/json")
     q_list = q_string.split(" ")
     q_s = q_list[len(q_list) - 1]
     if len(q_s) == 0 or q_s[0] != "@":
-        return HttpResponse(
-            request.GET["callback"] + "(" + json.dumps([]) + ");", content_type="application/json"
-        )
+        return HttpResponse(request.GET["callback"] + "(" + json.dumps([]) + ");", content_type="application/json")

     q_s = q_s[1:]
     search_qs = User.objects.filter(username__startswith=q_s)
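Both reformatted returns in autocomplete emit JSONP: the JSON payload is wrapped in a caller-supplied callback so the response can be consumed from a <script> tag. A minimal sketch of that response shape (the jsonp_response helper is ours, not part of this diff):

    import json

    from django.http import HttpResponse

    def jsonp_response(callback, payload):
        # JSONP: the browser executes callback(<json>) when the script loads.
        return HttpResponse(f"{callback}({json.dumps(payload)});", content_type="application/json")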
88 changes: 21 additions & 67 deletions website/api/views.py
@@ -166,15 +166,11 @@ def get_issue_info(self, request, issue):
         if issue.screenshot:
             # If an image exists in the Issue table, return it along with additional images from IssueScreenshot
             screenshots = [request.build_absolute_uri(issue.screenshot.url)] + [
-                request.build_absolute_uri(screenshot.image.url)
-                for screenshot in issue.screenshots.all()
+                request.build_absolute_uri(screenshot.image.url) for screenshot in issue.screenshots.all()
             ]
         else:
             # If no image exists in the Issue table, return only the images from IssueScreenshot
-            screenshots = [
-                request.build_absolute_uri(screenshot.image.url)
-                for screenshot in issue.screenshots.all()
-            ]
+            screenshots = [request.build_absolute_uri(screenshot.image.url) for screenshot in issue.screenshots.all()]

         is_upvoted = False
         is_flagged = False
@@ -232,9 +228,7 @@ def create(self, request, *args, **kwargs):

         screenshot_count = len(self.request.FILES.getlist("screenshots"))
         if screenshot_count == 0:
-            return Response(
-                {"error": "Upload at least one image!"}, status=status.HTTP_400_BAD_REQUEST
-            )
+            return Response({"error": "Upload at least one image!"}, status=status.HTTP_400_BAD_REQUEST)
         elif screenshot_count > 5:
             return Response({"error": "Max limit of 5 images!"}, status=status.HTTP_400_BAD_REQUEST)

@@ -247,9 +241,7 @@ def create(self, request, *args, **kwargs):
         for screenshot in self.request.FILES.getlist("screenshots"):
             if image_validator(screenshot):
                 filename = screenshot.name
-                screenshot.name = (
-                    f"{filename[:10]}{str(uuid.uuid4())[:40]}.{filename.split('.')[-1]}"
-                )
+                screenshot.name = f"{filename[:10]}{str(uuid.uuid4())[:40]}.{filename.split('.')[-1]}"
                 file_path = default_storage.save(f"screenshots/{screenshot.name}", screenshot)

                 # Create the IssueScreenshot object and associate it with the issue
@@ -387,19 +379,11 @@ def filter(self, request, *args, **kwargs):
             temp["rank"] = rank_user
             temp["id"] = each["id"]
             temp["User"] = each["username"]
-            temp["score"] = Points.objects.filter(user=each["id"]).aggregate(
-                total_score=Sum("score")
-            )
-            temp["image"] = list(UserProfile.objects.filter(user=each["id"]).values("user_avatar"))[
-                0
-            ]
-            temp["title_type"] = list(UserProfile.objects.filter(user=each["id"]).values("title"))[
-                0
-            ]
+            temp["score"] = Points.objects.filter(user=each["id"]).aggregate(total_score=Sum("score"))
+            temp["image"] = list(UserProfile.objects.filter(user=each["id"]).values("user_avatar"))[0]
+            temp["title_type"] = list(UserProfile.objects.filter(user=each["id"]).values("title"))[0]
             temp["follows"] = list(UserProfile.objects.filter(user=each["id"]).values("follows"))[0]
-            temp["savedissue"] = list(
-                UserProfile.objects.filter(user=each["id"]).values("issue_saved")
-            )[0]
+            temp["savedissue"] = list(UserProfile.objects.filter(user=each["id"]).values("issue_saved"))[0]
             rank_user = rank_user + 1
             users.append(temp)

@@ -470,9 +454,7 @@ def get(self, request, format=None, *args, **kwargs):
     def organization_leaderboard(self, request, *args, **kwargs):
         paginator = PageNumberPagination()
         organizations = (
-            Organization.objects.values()
-            .annotate(issue_count=Count("domain__issue"))
-            .order_by("-issue_count")
+            Organization.objects.values().annotate(issue_count=Count("domain__issue")).order_by("-issue_count")
         )
         page = paginator.paginate_queryset(organizations, request)

@@ -486,9 +468,7 @@ def get(self, request, *args, **kwargs):
         hunt_count = Hunt.objects.all().count()
         domain_count = Domain.objects.all().count()

-        return Response(
-            {"bugs": bug_count, "users": user_count, "hunts": hunt_count, "domains": domain_count}
-        )
+        return Response({"bugs": bug_count, "users": user_count, "hunts": hunt_count, "domains": domain_count})


 class UrlCheckApiViewset(APIView):
@@ -503,9 +483,7 @@ def post(self, request, *args, **kwargs):
         domain = domain_url.replace("https://", "").replace("http://", "").replace("www.", "")

         issues = (
-            Issue.objects.filter(
-                Q(Q(domain__name=domain) | Q(domain__url__icontains=domain)) & Q(is_hidden=False)
-            )
+            Issue.objects.filter(Q(Q(domain__name=domain) | Q(domain__url__icontains=domain)) & Q(is_hidden=False))
             .values(
                 "id",
                 "description",
@@ -533,27 +511,17 @@ def get_active_hunts(self, request, fields, *args, **kwargs):
         return Response(hunts)

     def get_previous_hunts(self, request, fields, *args, **kwargs):
-        hunts = (
-            Hunt.objects.values(*fields)
-            .filter(is_published=True, end_on__lte=datetime.now())
-            .order_by("-end_on")
-        )
+        hunts = Hunt.objects.values(*fields).filter(is_published=True, end_on__lte=datetime.now()).order_by("-end_on")
         return Response(hunts)

     def get_upcoming_hunts(self, request, fields, *args, **kwargs):
         hunts = (
-            Hunt.objects.values(*fields)
-            .filter(is_published=True, starts_on__gte=datetime.now())
-            .order_by("starts_on")
+            Hunt.objects.values(*fields).filter(is_published=True, starts_on__gte=datetime.now()).order_by("starts_on")
         )
         return Response(hunts)

     def get_search_by_name(self, request, search_query, fields, *args, **kwargs):
-        hunts = (
-            Hunt.objects.values(*fields)
-            .filter(is_published=True, name__icontains=search_query)
-            .order_by("end_on")
-        )
+        hunts = Hunt.objects.values(*fields).filter(is_published=True, name__icontains=search_query).order_by("end_on")
         return Response(hunts)

     def get(self, request, *args, **kwargs):
@@ -608,15 +576,11 @@ def get_active_hunts(self, request, *args, **kwargs):
         return Response(self.serialize_hunts(hunts))

     def get_previous_hunts(self, request, *args, **kwargs):
-        hunts = Hunt.objects.filter(is_published=True, end_on__lte=datetime.now()).order_by(
-            "-end_on"
-        )
+        hunts = Hunt.objects.filter(is_published=True, end_on__lte=datetime.now()).order_by("-end_on")
         return Response(self.serialize_hunts(hunts))

     def get_upcoming_hunts(self, request, *args, **kwargs):
-        hunts = Hunt.objects.filter(is_published=True, starts_on__gte=datetime.now()).order_by(
-            "starts_on"
-        )
+        hunts = Hunt.objects.filter(is_published=True, starts_on__gte=datetime.now()).order_by("starts_on")
         return Response(self.serialize_hunts(hunts))

     def get(self, request, *args, **kwargs):
@@ -626,23 +590,17 @@ def get(self, request, *args, **kwargs):
         previousHunt = request.query_params.get("previousHunt")
         upcomingHunt = request.query_params.get("upcomingHunt")
         if activeHunt:
-            page = paginator.paginate_queryset(
-                self.get_active_hunts(request, *args, **kwargs), request
-            )
+            page = paginator.paginate_queryset(self.get_active_hunts(request, *args, **kwargs), request)

             return paginator.get_paginated_response(page)

         elif previousHunt:
-            page = paginator.paginate_queryset(
-                self.get_previous_hunts(request, *args, **kwargs), request
-            )
+            page = paginator.paginate_queryset(self.get_previous_hunts(request, *args, **kwargs), request)

             return paginator.get_paginated_response(page)

         elif upcomingHunt:
-            page = paginator.paginate_queryset(
-                self.get_upcoming_hunts(request, *args, **kwargs), request
-            )
+            page = paginator.paginate_queryset(self.get_upcoming_hunts(request, *args, **kwargs), request)

             return paginator.get_paginated_response(page)

@@ -668,9 +626,7 @@ def post(self, request, *args, **kwargs):
         try:
             current_site = get_current_site(request)
             referral_code, created = InviteFriend.objects.get_or_create(sender=request.user)
-            referral_link = (
-                f"https://{current_site.domain}/referral/?ref={referral_code.referral_code}"
-            )
+            referral_link = f"https://{current_site.domain}/referral/?ref={referral_code.referral_code}"

             # Prepare email content
             subject = f"Join me on {current_site.name}!"
@@ -703,9 +659,7 @@ def post(self, request, *args, **kwargs):
                     }
                 )
             else:
-                return Response(
-                    {"error": "Email failed to send", "email_status": "failed"}, status=500
-                )
+                return Response({"error": "Email failed to send", "email_status": "failed"}, status=500)

         except smtplib.SMTPException as e:
             return Response(
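Several of the hunts endpoints above drive DRF's PageNumberPagination by hand rather than through a generic view. A minimal sketch of that idiom (view name and sample data are ours):

    from rest_framework.pagination import PageNumberPagination
    from rest_framework.views import APIView

    class ExampleHuntList(APIView):
        def get(self, request):
            paginator = PageNumberPagination()
            items = [{"name": f"hunt-{i}"} for i in range(50)]  # stand-in for a queryset
            page = paginator.paginate_queryset(items, request)
            # Wraps the page in count/next/previous/results metadata.
            return paginator.get_paginated_response(page)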
4 changes: 1 addition & 3 deletions website/bitcoin_utils.py
@@ -25,9 +25,7 @@ def create_bacon_token(user, contribution):
         contribution.txid = txid
         contribution.save()

-        token = BaconToken.objects.create(
-            user=user, amount=amount, contribution=contribution, token_id=txid
-        )
+        token = BaconToken.objects.create(user=user, amount=amount, contribution=contribution, token_id=txid)
         return token

     except JSONRPCException as e:
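The bitcoin_utils hunk creates the BaconToken row inside a try block guarded by JSONRPCException, which matches the python-bitcoinrpc client. A rough sketch of that error-handling idiom, assuming python-bitcoinrpc (the RPC URL and call here are illustrative, not from this repo):

    from bitcoinrpc.authproxy import AuthServiceProxy, JSONRPCException

    rpc = AuthServiceProxy("http://user:password@127.0.0.1:8332")
    try:
        info = rpc.getblockchaininfo()
    except JSONRPCException as e:
        # Raised when the node rejects the call (bad method, params, auth...).
        print(f"RPC error: {e}")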
12 changes: 3 additions & 9 deletions website/bot.py
@@ -133,9 +133,7 @@ def embed_documents_and_save(embed_docs):
             if file.is_file():
                 with open(file, "rb") as f:
                     content = f.read()
-                default_storage.save(
-                    str(db_folder_path / file.relative_to(temp_db_path)), ContentFile(content)
-                )
+                default_storage.save(str(db_folder_path / file.relative_to(temp_db_path)), ContentFile(content))
                 log_chat(f"Uploaded file {file.name} to storage")
     except Exception as e:
         log_chat(f"Error during FAISS index embedding and saving: {e}")
@@ -156,9 +154,7 @@ def load_vector_store():
     check_db_folder_str = db_folder_str + "/index.faiss"
     if not default_storage.exists(check_db_folder_str):
         temp_dir.cleanup()
-        ChatBotLog.objects.create(
-            question="Folder does not exist", answer=f"Folder Str: {str(db_folder_str)}"
-        )
+        ChatBotLog.objects.create(question="Folder does not exist", answer=f"Folder Str: {str(db_folder_str)}")
         return None

     # Download all files from the storage folder to the temp directory
@@ -193,9 +189,7 @@ def conversation_chain(vector_store):
         )
     )
     llm = ChatOpenAI(model_name="gpt-3.5-turbo-0125", temperature=0.5)
-    retriever = vector_store.as_retriever(
-        search_type="similarity", search_kwargs={"k": retrieval_search_results}
-    )
+    retriever = vector_store.as_retriever(search_type="similarity", search_kwargs={"k": retrieval_search_results})
     memory = ConversationSummaryMemory(
         llm=llm,
         return_messages=True,
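The conversation_chain hunk builds a similarity retriever over a FAISS vector store via LangChain's as_retriever API. A rough standalone sketch (import paths assume the post-split langchain-community / langchain-openai packages and vary by version; the sample texts and k=2 are ours):

    from langchain_community.vectorstores import FAISS
    from langchain_openai import OpenAIEmbeddings

    # Throwaway index; the real code loads a persisted one from storage.
    store = FAISS.from_texts(["BLT is a bug logging tool.", "Hunts reward reported bugs."], OpenAIEmbeddings())

    retriever = store.as_retriever(search_type="similarity", search_kwargs={"k": 2})
    docs = retriever.invoke("What is BLT?")  # top-2 most similar documents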
