diff --git a/BACON/BLT BTC Runes documentation .md b/BACON/BLT BTC Runes documentation .md
new file mode 100644
index 000000000..be70ef203
--- /dev/null
+++ b/BACON/BLT BTC Runes documentation .md
@@ -0,0 +1,59 @@
+# Documentation for etching runes for BACON
+
+**2 Jan, 2025.**
+**11 AM IST.**
+
+Right now, we have two servers with alphaVPS as the provider.
+
+One node runs on testnet, where we plan to etch the BTC runes initially for POC purposes.
+
+The other is syncing with mainnet, where we plan to etch the BACON token.
+
+**The current state while writing:**
+
+1. The testnet server has a corrupted chain index (reindexing started today) because the bitcoind process keeps getting killed at random. We might have to raise a ticket with alphaVPS for this, since it's probable they kill the process because of I/O limits.
+2. The mainnet node is syncing slowly but steadily and is at ~46.5% as of writing.
+
+**Current workflow**
+
+1. On the testnet node, one tmux session runs the node and another runs the ord server.
+2. Once both of these sync up, we will probably create another tmux session to create a wallet and etch runes.
+3. On the mainnet node, we have just a single tmux session as of writing, in which the bitcoind process is syncing with mainnet.
+
+**Some useful references:**
+[https://ordtutorial.vercel.app/ordtestnet](https://ordtutorial.vercel.app/ordtestnet)
+
+**Current bitcoind config on testnet:**
+server=1
+testnet=1
+txindex=1
+rpcuser=apoorva
+blockfilterindex=1
+rpcpassword=y^2DhUnxrhFr7qAj2yjhvykFz
+rpcallowip=127.0.0.1
+[test]
+rpcport=8332
+rpcbind=127.0.0.1
+
+**Current bitcoind config on mainnet:**
+server=1
+txindex=1
+rpcuser=apoorva
+blockfilterindex=1
+rpcpassword=y^2DhUnxrhFr7qAj2yjhvykFz
+rpcallowip=127.0.0.1
+rpcport=8332
+blockfilterindex=1
+
+Side note: we might want to add rpcbind here after the node syncs completely.
+
+**Command to start the bitcoind process on the testnet** (note that we use the bitcoind snap package on both servers):
+bitcoin-core.daemon -datadir=/home/apoorva/test-btc-data -dbcache=256 -rpcworkqueue=1000
+
+**Command to start the ordinal server to index blocks after the node syncs completely** (we will create a wallet and etch runes once this finishes):
+sudo ./ord --bitcoin-rpc-user apoorva --bitcoin-rpc-pass y^2DhUnxrhFr7qAj2yjhvykFz --rpc-url http://127.0.0.1:8332 --data-dir /home/apoorva/ord-data --bitcoin-data-dir /home/apoorva/test-btc-data --index-runes --testnet --verbose server
+
+**Some additional observations:**
+
+1. ^C takes a long time to stop the bitcoind process, so a faster way is to find the PID of the bitcoind process and kill it directly. This saves a lot of time, but use it with caution, since it can corrupt the indexing and syncing.
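Since both nodes are mid-sync, a quick way to check how far along either one is — a minimal sketch, assuming the same snap install and datadirs as in the doc above (`bitcoin-core.cli` is the snap's wrapper around `bitcoin-cli`):

```sh
# Check sync progress (point the CLI at the same datadir the daemon
# was started with so it picks up the RPC credentials and network).
bitcoin-core.cli -datadir=/home/apoorva/test-btc-data getblockchaininfo

# In the JSON output, "verificationprogress" is the sync fraction
# (~0.465 on mainnet as of writing), and "blocks" vs "headers" shows
# how far behind the chain tip the node still is.
```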
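For the wallet-and-etch step planned above, a rough sketch of the ord commands, assuming a recent ord release where runes are etched from a batch file; the exact flags and the batch schema vary between ord versions, and `batch.yaml` here is a hypothetical file naming the rune, its divisibility, symbol, and supply:

```sh
# Shared flags, matching the ord server invocation in the doc above.
ORD_FLAGS="--testnet --index-runes \
  --bitcoin-rpc-user apoorva --bitcoin-rpc-pass y^2DhUnxrhFr7qAj2yjhvykFz \
  --rpc-url http://127.0.0.1:8332 \
  --data-dir /home/apoorva/ord-data --bitcoin-data-dir /home/apoorva/test-btc-data"

# Create an ord-managed wallet, then print an address to fund with testnet BTC.
sudo ./ord $ORD_FLAGS wallet create
sudo ./ord $ORD_FLAGS wallet receive

# Etch the rune once the wallet is funded and the index is current
# (batch.yaml is hypothetical; its schema depends on the ord version).
sudo ./ord $ORD_FLAGS wallet batch --fee-rate 1 --batch batch.yaml
```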
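On observation 1: a plain `kill` sends SIGTERM, which triggers the same slow, clean shutdown as ^C, so the "instant" kill implies SIGKILL (`kill -9`), which skips the database flush — exactly why it can corrupt the index. A sketch of both paths, assuming the snap layout above:

```sh
# Graceful stop: asks bitcoind to flush its databases and exit cleanly.
# Slow, but safe for the chain index.
bitcoin-core.cli -datadir=/home/apoorva/test-btc-data stop

# Instant stop: SIGKILL cannot be caught, so bitcoind dies without flushing.
# Fast, but expect a possible reindex if used mid-sync.
kill -9 $(pgrep bitcoind)
```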
+ diff --git a/blt/middleware/ip_restrict.py b/blt/middleware/ip_restrict.py index 783f6a31a..2c9118d6b 100644 --- a/blt/middleware/ip_restrict.py +++ b/blt/middleware/ip_restrict.py @@ -87,9 +87,7 @@ def increment_block_count(self, ip=None, network=None, user_agent=None): if ip: blocked_entry = Blocked.objects.select_for_update().filter(address=ip).first() elif network: - blocked_entry = ( - Blocked.objects.select_for_update().filter(ip_network=network).first() - ) + blocked_entry = Blocked.objects.select_for_update().filter(ip_network=network).first() elif user_agent: # Correct lookup: find if any user_agent_string is a substring of the user_agent blocked_entry = ( @@ -111,9 +109,7 @@ def increment_block_count(self, ip=None, network=None, user_agent=None): blocked_entry.save(update_fields=["count"]) def __call__(self, request): - ip = request.META.get("HTTP_X_FORWARDED_FOR", "").split(",")[0].strip() or request.META.get( - "REMOTE_ADDR", "" - ) + ip = request.META.get("HTTP_X_FORWARDED_FOR", "").split(",")[0].strip() or request.META.get("REMOTE_ADDR", "") agent = request.META.get("HTTP_USER_AGENT", "").strip() blocked_ips = self.blocked_ips() diff --git a/blt/settings.py b/blt/settings.py index ae9f8bf1e..735090266 100644 --- a/blt/settings.py +++ b/blt/settings.py @@ -244,9 +244,7 @@ if not GOOGLE_CREDENTIALS: raise Exception("GOOGLE_CREDENTIALS environment variable is not set.") - GS_CREDENTIALS = service_account.Credentials.from_service_account_info( - json.loads(GOOGLE_CREDENTIALS) - ) + GS_CREDENTIALS = service_account.Credentials.from_service_account_info(json.loads(GOOGLE_CREDENTIALS)) STORAGES = { "default": { @@ -313,6 +311,7 @@ ACCOUNT_EMAIL_REQUIRED = True ACCOUNT_USERNAME_REQUIRED = True ACCOUNT_EMAIL_VERIFICATION = "optional" +ACCOUNT_FORMS = {"signup": "website.forms.SignupFormWithCaptcha"} SECURE_PROXY_SSL_HEADER = ("HTTP_X_FORWARDED_PROTO", "https") diff --git a/blt/urls.py b/blt/urls.py index 4ede4e748..3fd33c56e 100644 --- a/blt/urls.py +++ b/blt/urls.py @@ -35,13 +35,7 @@ UserIssueViewSet, UserProfileViewSet, ) -from website.views.blog import ( - PostCreateView, - PostDeleteView, - PostDetailView, - PostListView, - PostUpdateView, -) +from website.views.blog import PostCreateView, PostDeleteView, PostDetailView, PostListView, PostUpdateView from website.views.company import ( AddDomainView, AddHuntView, @@ -188,6 +182,8 @@ ) from website.views.slack_handlers import slack_commands, slack_events from website.views.teams import ( + TeamChallenges, + TeamLeaderboard, TeamOverview, add_member, create_team, @@ -203,6 +199,7 @@ GlobalLeaderboardView, InviteCreate, SpecificMonthLeaderboardView, + UserChallengeListView, UserDeleteView, UserProfileDetailsView, UserProfileDetailView, @@ -858,8 +855,12 @@ name="similarity_scan", ), path("projects/create/", create_project, name="create_project"), + path("teams/challenges/", TeamChallenges.as_view(), name="team_challenges"), + path("teams/leaderboard/", TeamLeaderboard.as_view(), name="team_leaderboard"), + path("challenges/", UserChallengeListView.as_view(), name="user_challenges"), path("project//", ProjectsDetailView.as_view(), name="projects_detail"), path("slack/events", slack_events, name="slack_events"), + path("owasp/", TemplateView.as_view(template_name="owasp.html"), name="owasp"), ] if settings.DEBUG: diff --git a/comments/migrations/0001_initial.py b/comments/migrations/0001_initial.py index 40d5f807e..3a4517c10 100644 --- a/comments/migrations/0001_initial.py +++ b/comments/migrations/0001_initial.py @@ -19,9 +19,7 @@ 
class Migration(migrations.Migration): fields=[ ( "id", - models.AutoField( - auto_created=True, primary_key=True, serialize=False, verbose_name="ID" - ), + models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID"), ), ("author", models.CharField(max_length=200)), ("author_url", models.CharField(max_length=200)), diff --git a/comments/migrations/0002_comment_parentid.py b/comments/migrations/0002_comment_parentid.py index 074746e89..dfc0b6b46 100644 --- a/comments/migrations/0002_comment_parentid.py +++ b/comments/migrations/0002_comment_parentid.py @@ -14,8 +14,6 @@ class Migration(migrations.Migration): migrations.AddField( model_name="comment", name="parentId", - field=models.ForeignKey( - null=True, on_delete=django.db.models.deletion.CASCADE, to="comments.Comment" - ), + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to="comments.Comment"), ), ] diff --git a/comments/migrations/0005_auto_20170727_1309.py b/comments/migrations/0005_auto_20170727_1309.py index 72e1793ac..3e6e3209d 100644 --- a/comments/migrations/0005_auto_20170727_1309.py +++ b/comments/migrations/0005_auto_20170727_1309.py @@ -14,8 +14,6 @@ class Migration(migrations.Migration): migrations.AlterField( model_name="comment", name="parent", - field=models.ForeignKey( - null=True, on_delete=django.db.models.deletion.CASCADE, to="comments.Comment" - ), + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to="comments.Comment"), ), ] diff --git a/comments/views.py b/comments/views.py index 5388781a0..c292a5bec 100644 --- a/comments/views.py +++ b/comments/views.py @@ -128,9 +128,7 @@ def reply_comment(request, pk): issue = Issue.objects.get(pk=request.GET["issue_pk"]) reply_text = request.GET.get("text_comment") reply_text = escape(reply_text) - comment = Comment( - author=author, author_url=author_url, issue=issue, text=reply_text, parent=parent_obj - ) + comment = Comment(author=author, author_url=author_url, issue=issue, text=reply_text, parent=parent_obj) comment.save() all_comment = Comment.objects.filter(issue=issue) return render( @@ -145,15 +143,11 @@ def autocomplete(request): q_string = request.GET.get("search", "") q_string = escape(q_string) if len(q_string) == 0: - return HttpResponse( - request.GET["callback"] + "(" + json.dumps([]) + ");", content_type="application/json" - ) + return HttpResponse(request.GET["callback"] + "(" + json.dumps([]) + ");", content_type="application/json") q_list = q_string.split(" ") q_s = q_list[len(q_list) - 1] if len(q_s) == 0 or q_s[0] != "@": - return HttpResponse( - request.GET["callback"] + "(" + json.dumps([]) + ");", content_type="application/json" - ) + return HttpResponse(request.GET["callback"] + "(" + json.dumps([]) + ");", content_type="application/json") q_s = q_s[1:] search_qs = User.objects.filter(username__startswith=q_s) diff --git a/poetry.lock b/poetry.lock index be1d6b67f..7d5dffc5b 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,15 @@ -# This file is automatically @generated by Poetry 1.8.5 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.4 and should not be changed by hand. + +[[package]] +name = "aiofiles" +version = "24.1.0" +description = "File support for asyncio." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "aiofiles-24.1.0-py3-none-any.whl", hash = "sha256:b4ec55f4195e3eb5d7abd1bf7e061763e864dd4954231fb8539a0ef8bb8260e5"}, + {file = "aiofiles-24.1.0.tar.gz", hash = "sha256:22a075c9e5a3810f0c2e48f3008c94d68c65d763b9b03857924c99e57355166c"}, +] [[package]] name = "aiohappyeyeballs" @@ -135,13 +146,13 @@ files = [ [[package]] name = "anyio" -version = "4.7.0" +version = "4.8.0" description = "High level compatibility layer for multiple asynchronous event loop implementations" optional = false python-versions = ">=3.9" files = [ - {file = "anyio-4.7.0-py3-none-any.whl", hash = "sha256:ea60c3723ab42ba6fff7e8ccb0488c898ec538ff4df1f1d5e642c3601d07e352"}, - {file = "anyio-4.7.0.tar.gz", hash = "sha256:2f834749c602966b7d456a7567cafcb309f96482b5081d14ac93ccd457f9dd48"}, + {file = "anyio-4.8.0-py3-none-any.whl", hash = "sha256:b5011f270ab5eb0abf13385f851315585cc37ef330dd88e27ec3d34d651fd47a"}, + {file = "anyio-4.8.0.tar.gz", hash = "sha256:1d9fe889df5212298c0c0723fa20479d1b94883a2df44bd3897aa91083316f7a"}, ] [package.dependencies] @@ -151,7 +162,7 @@ typing_extensions = {version = ">=4.5", markers = "python_version < \"3.13\""} [package.extras] doc = ["Sphinx (>=7.4,<8.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx_rtd_theme"] -test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "truststore (>=0.9.1)", "uvloop (>=0.21)"] +test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "trustme", "truststore (>=0.9.1)", "uvloop (>=0.21)"] trio = ["trio (>=0.26.1)"] [[package]] @@ -213,13 +224,13 @@ websockets = ">=12,<14" [[package]] name = "attrs" -version = "24.3.0" +version = "25.1.0" description = "Classes Without Boilerplate" optional = false python-versions = ">=3.8" files = [ - {file = "attrs-24.3.0-py3-none-any.whl", hash = "sha256:ac96cd038792094f438ad1f6ff80837353805ac950cd2aa0e0625ef19850c308"}, - {file = "attrs-24.3.0.tar.gz", hash = "sha256:8f5c07333d543103541ba7be0e2ce16eeee8130cb0b3f9238ab904ce1e85baff"}, + {file = "attrs-25.1.0-py3-none-any.whl", hash = "sha256:c75a69e28a550a7e93789579c22aa26b0f5b83b75dc4e08fe092980051e1090a"}, + {file = "attrs-25.1.0.tar.gz", hash = "sha256:1c97078a80c814273a76b2a298a932eb681c87415c11dee0a6921de7f1b02c3e"}, ] [package.extras] @@ -324,26 +335,15 @@ d = ["aiohttp (>=3.10)"] jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] uvloop = ["uvloop (>=0.15.2)"] -[[package]] -name = "boto" -version = "2.49.0" -description = "Amazon Web Services Library" -optional = false -python-versions = "*" -files = [ - {file = "boto-2.49.0-py2.py3-none-any.whl", hash = "sha256:147758d41ae7240dc989f0039f27da8ca0d53734be0eb869ef16e3adcfa462e8"}, - {file = "boto-2.49.0.tar.gz", hash = "sha256:ea0d3b40a2d852767be77ca343b58a9e3a4b00d9db440efb8da74b4e58025e5a"}, -] - [[package]] name = "cachetools" -version = "5.5.0" +version = "5.5.1" description = "Extensible memoizing collections and decorators" optional = false python-versions = ">=3.7" files = [ - {file = "cachetools-5.5.0-py3-none-any.whl", hash = "sha256:02134e8439cdc2ffb62023ce1debca2944c3f289d66bb17ead3ab3dede74b292"}, - {file = "cachetools-5.5.0.tar.gz", hash = "sha256:2cc24fb4cbe39633fb7badd9db9ca6295d766d9c2995f245725a46715d050f2a"}, + {file = "cachetools-5.5.1-py3-none-any.whl", hash = 
"sha256:b76651fdc3b24ead3c648bbdeeb940c1b04d365b38b4af66788f9ec4a81d42bb"}, + {file = "cachetools-5.5.1.tar.gz", hash = "sha256:70f238fbba50383ef62e55c6aff6d9673175fe59f7c6782c7a0b9e38f4a9df95"}, ] [[package]] @@ -870,24 +870,6 @@ files = [ marshmallow = ">=3.18.0,<4.0.0" typing-inspect = ">=0.4.0,<1" -[[package]] -name = "deepdiff" -version = "8.1.1" -description = "Deep Difference and Search of any Python object/data. Recreate objects by adding adding deltas to each other." -optional = false -python-versions = ">=3.8" -files = [ - {file = "deepdiff-8.1.1-py3-none-any.whl", hash = "sha256:b0231fa3afb0f7184e82535f2b4a36636442ed21e94a0cf3aaa7982157e7ebca"}, - {file = "deepdiff-8.1.1.tar.gz", hash = "sha256:dd7bc7d5c8b51b5b90f01b0e2fe23c801fd8b4c6a7ee7e31c5a3c3663fcc7ceb"}, -] - -[package.dependencies] -orderly-set = ">=5.2.3,<6" - -[package.extras] -cli = ["click (==8.1.7)", "pyyaml (==6.0.2)"] -optimize = ["orjson"] - [[package]] name = "defusedxml" version = "0.7.1" @@ -1011,22 +993,6 @@ files = [ Django = ">=1.11" six = "*" -[[package]] -name = "django-bootstrap-datepicker-plus" -version = "5.0.5" -description = "Bootstrap3/Bootstrap4/Bootstrap5 DatePickerInput, TimePickerInput, DateTimePickerInput, MonthPickerInput, YearPickerInput" -optional = false -python-versions = ">=3.8,<4.0" -files = [ - {file = "django_bootstrap_datepicker_plus-5.0.5-py3-none-any.whl", hash = "sha256:f0818dcaca6824f2bc3b42b53e2cd0e42e9ceb2c8802254f9d38d4cce04b31d8"}, - {file = "django_bootstrap_datepicker_plus-5.0.5.tar.gz", hash = "sha256:ea5e2bc2137a72b57ba10d2a5f18373049b0d252052f921e5a76c260eaaa35ee"}, -] - -[package.dependencies] -Django = ">=2,<6" -pydantic = "*" -typing-extensions = "*" - [[package]] name = "django-braces" version = "1.16.0" @@ -1041,21 +1007,6 @@ files = [ [package.dependencies] Django = ">=2.2" -[[package]] -name = "django-cors-headers" -version = "4.6.0" -description = "django-cors-headers is a Django application for handling the server headers required for Cross-Origin Resource Sharing (CORS)." -optional = false -python-versions = ">=3.9" -files = [ - {file = "django_cors_headers-4.6.0-py3-none-any.whl", hash = "sha256:8edbc0497e611c24d5150e0055d3b178c6534b8ed826fb6f53b21c63f5d48ba3"}, - {file = "django_cors_headers-4.6.0.tar.gz", hash = "sha256:14d76b4b4c8d39375baeddd89e4f08899051eeaf177cb02a29bd6eae8cf63aa8"}, -] - -[package.dependencies] -asgiref = ">=3.6" -django = ">=4.2" - [[package]] name = "django-debug-toolbar" version = "4.4.6" @@ -1279,19 +1230,6 @@ libcloud = ["apache-libcloud"] s3 = ["boto3 (>=1.4.4)"] sftp = ["paramiko (>=1.15)"] -[[package]] -name = "django-tellme" -version = "0.7.3" -description = "A simple Django app that enables user feedback." -optional = false -python-versions = ">=2.7" -files = [ - {file = "django-tellme-0.7.3.tar.gz", hash = "sha256:bebff4b5e2228583ed52baf372299d7d70a9c86da44ce357bb87ccad7f875fc8"}, -] - -[package.dependencies] -Pillow = ">=8.4.0" - [[package]] name = "django-timedeltafield" version = "0.7.10" @@ -1375,42 +1313,34 @@ uritemplate = ">=3.0.0" coreapi = ["coreapi (>=2.3.3)", "coreschema (>=0.0.4)"] validation = ["swagger-spec-validator (>=2.1.0)"] -[[package]] -name = "easyprocess" -version = "1.1" -description = "Easy to use Python subprocess interface." 
-optional = false -python-versions = "*" -files = [ - {file = "EasyProcess-1.1-py3-none-any.whl", hash = "sha256:82eed523a0a5eb12a81fa4eacd9f342caeb3f900eb4b798740e6696ad07e63f9"}, - {file = "EasyProcess-1.1.tar.gz", hash = "sha256:885898302a57aab948973e8b5d32a4229392b9fb2d986ab1d4ffd590e5ba90ec"}, -] - [[package]] name = "emoji" -version = "2.14.0" +version = "2.14.1" description = "Emoji for Python" optional = false python-versions = ">=3.7" files = [ - {file = "emoji-2.14.0-py3-none-any.whl", hash = "sha256:fcc936bf374b1aec67dda5303ae99710ba88cc9cdce2d1a71c5f2204e6d78799"}, - {file = "emoji-2.14.0.tar.gz", hash = "sha256:f68ac28915a2221667cddb3e6c589303c3c6954c6c5af6fefaec7f9bdf72fdca"}, + {file = "emoji-2.14.1-py3-none-any.whl", hash = "sha256:35a8a486c1460addb1499e3bf7929d3889b2e2841a57401903699fef595e942b"}, + {file = "emoji-2.14.1.tar.gz", hash = "sha256:f8c50043d79a2c1410ebfae833ae1868d5941a67a6cd4d18377e2eb0bd79346b"}, ] [package.extras] dev = ["coverage", "pytest (>=7.4.4)"] [[package]] -name = "et-xmlfile" -version = "2.0.0" -description = "An implementation of lxml.xmlfile for the standard library" +name = "eval-type-backport" +version = "0.2.2" +description = "Like `typing._eval_type`, but lets older Python versions use newer typing features." optional = false python-versions = ">=3.8" files = [ - {file = "et_xmlfile-2.0.0-py3-none-any.whl", hash = "sha256:7a91720bc756843502c3b7504c77b8fe44217c85c537d85037f0f536151b2caa"}, - {file = "et_xmlfile-2.0.0.tar.gz", hash = "sha256:dab3f4764309081ce75662649be815c4c9081e88f0837825f90fd28317d4da54"}, + {file = "eval_type_backport-0.2.2-py3-none-any.whl", hash = "sha256:cb6ad7c393517f476f96d456d0412ea80f0a8cf96f6892834cd9340149111b0a"}, + {file = "eval_type_backport-0.2.2.tar.gz", hash = "sha256:f0576b4cf01ebb5bd358d02314d31846af5e07678387486e2c798af0e7d849c1"}, ] +[package.extras] +tests = ["pytest"] + [[package]] name = "faiss-cpu" version = "1.9.0.post1" @@ -1452,18 +1382,18 @@ packaging = "*" [[package]] name = "filelock" -version = "3.16.1" +version = "3.17.0" description = "A platform independent file lock." 
optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "filelock-3.16.1-py3-none-any.whl", hash = "sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0"}, - {file = "filelock-3.16.1.tar.gz", hash = "sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435"}, + {file = "filelock-3.17.0-py3-none-any.whl", hash = "sha256:533dc2f7ba78dc2f0f531fc6c4940addf7b70a481e269a5a3b93be94ffbe8338"}, + {file = "filelock-3.17.0.tar.gz", hash = "sha256:ee4e77401ef576ebb38cd7f13b9b28893194acc20a8e68e18730ba9c0e54660e"}, ] [package.extras] -docs = ["furo (>=2024.8.6)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4.1)"] -testing = ["covdefaults (>=2.3)", "coverage (>=7.6.1)", "diff-cover (>=9.2)", "pytest (>=8.3.3)", "pytest-asyncio (>=0.24)", "pytest-cov (>=5)", "pytest-mock (>=3.14)", "pytest-timeout (>=2.3.1)", "virtualenv (>=20.26.4)"] +docs = ["furo (>=2024.8.6)", "sphinx (>=8.1.3)", "sphinx-autodoc-typehints (>=3)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.6.10)", "diff-cover (>=9.2.1)", "pytest (>=8.3.4)", "pytest-asyncio (>=0.25.2)", "pytest-cov (>=6)", "pytest-mock (>=3.14)", "pytest-timeout (>=2.3.1)", "virtualenv (>=20.28.1)"] typing = ["typing-extensions (>=4.12.2)"] [[package]] @@ -1479,61 +1409,61 @@ files = [ [[package]] name = "fonttools" -version = "4.55.3" +version = "4.55.6" description = "Tools to manipulate font files" optional = false python-versions = ">=3.8" files = [ - {file = "fonttools-4.55.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:1dcc07934a2165ccdc3a5a608db56fb3c24b609658a5b340aee4ecf3ba679dc0"}, - {file = "fonttools-4.55.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f7d66c15ba875432a2d2fb419523f5d3d347f91f48f57b8b08a2dfc3c39b8a3f"}, - {file = "fonttools-4.55.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:27e4ae3592e62eba83cd2c4ccd9462dcfa603ff78e09110680a5444c6925d841"}, - {file = "fonttools-4.55.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:62d65a3022c35e404d19ca14f291c89cc5890032ff04f6c17af0bd1927299674"}, - {file = "fonttools-4.55.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d342e88764fb201286d185093781bf6628bbe380a913c24adf772d901baa8276"}, - {file = "fonttools-4.55.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:dd68c87a2bfe37c5b33bcda0fba39b65a353876d3b9006fde3adae31f97b3ef5"}, - {file = "fonttools-4.55.3-cp310-cp310-win32.whl", hash = "sha256:1bc7ad24ff98846282eef1cbeac05d013c2154f977a79886bb943015d2b1b261"}, - {file = "fonttools-4.55.3-cp310-cp310-win_amd64.whl", hash = "sha256:b54baf65c52952db65df39fcd4820668d0ef4766c0ccdf32879b77f7c804d5c5"}, - {file = "fonttools-4.55.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8c4491699bad88efe95772543cd49870cf756b019ad56294f6498982408ab03e"}, - {file = "fonttools-4.55.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5323a22eabddf4b24f66d26894f1229261021dacd9d29e89f7872dd8c63f0b8b"}, - {file = "fonttools-4.55.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5480673f599ad410695ca2ddef2dfefe9df779a9a5cda89503881e503c9c7d90"}, - {file = "fonttools-4.55.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da9da6d65cd7aa6b0f806556f4985bcbf603bf0c5c590e61b43aa3e5a0f822d0"}, - {file = "fonttools-4.55.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:e894b5bd60d9f473bed7a8f506515549cc194de08064d829464088d23097331b"}, - {file = 
"fonttools-4.55.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:aee3b57643827e237ff6ec6d28d9ff9766bd8b21e08cd13bff479e13d4b14765"}, - {file = "fonttools-4.55.3-cp311-cp311-win32.whl", hash = "sha256:eb6ca911c4c17eb51853143624d8dc87cdcdf12a711fc38bf5bd21521e79715f"}, - {file = "fonttools-4.55.3-cp311-cp311-win_amd64.whl", hash = "sha256:6314bf82c54c53c71805318fcf6786d986461622dd926d92a465199ff54b1b72"}, - {file = "fonttools-4.55.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:f9e736f60f4911061235603a6119e72053073a12c6d7904011df2d8fad2c0e35"}, - {file = "fonttools-4.55.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7a8aa2c5e5b8b3bcb2e4538d929f6589a5c6bdb84fd16e2ed92649fb5454f11c"}, - {file = "fonttools-4.55.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:07f8288aacf0a38d174445fc78377a97fb0b83cfe352a90c9d9c1400571963c7"}, - {file = "fonttools-4.55.3-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8d5e8916c0970fbc0f6f1bece0063363bb5857a7f170121a4493e31c3db3314"}, - {file = "fonttools-4.55.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ae3b6600565b2d80b7c05acb8e24d2b26ac407b27a3f2e078229721ba5698427"}, - {file = "fonttools-4.55.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:54153c49913f45065c8d9e6d0c101396725c5621c8aee744719300f79771d75a"}, - {file = "fonttools-4.55.3-cp312-cp312-win32.whl", hash = "sha256:827e95fdbbd3e51f8b459af5ea10ecb4e30af50221ca103bea68218e9615de07"}, - {file = "fonttools-4.55.3-cp312-cp312-win_amd64.whl", hash = "sha256:e6e8766eeeb2de759e862004aa11a9ea3d6f6d5ec710551a88b476192b64fd54"}, - {file = "fonttools-4.55.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:a430178ad3e650e695167cb53242dae3477b35c95bef6525b074d87493c4bf29"}, - {file = "fonttools-4.55.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:529cef2ce91dc44f8e407cc567fae6e49a1786f2fefefa73a294704c415322a4"}, - {file = "fonttools-4.55.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e75f12c82127486fac2d8bfbf5bf058202f54bf4f158d367e41647b972342ca"}, - {file = "fonttools-4.55.3-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:859c358ebf41db18fb72342d3080bce67c02b39e86b9fbcf1610cca14984841b"}, - {file = "fonttools-4.55.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:546565028e244a701f73df6d8dd6be489d01617863ec0c6a42fa25bf45d43048"}, - {file = "fonttools-4.55.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:aca318b77f23523309eec4475d1fbbb00a6b133eb766a8bdc401faba91261abe"}, - {file = "fonttools-4.55.3-cp313-cp313-win32.whl", hash = "sha256:8c5ec45428edaa7022f1c949a632a6f298edc7b481312fc7dc258921e9399628"}, - {file = "fonttools-4.55.3-cp313-cp313-win_amd64.whl", hash = "sha256:11e5de1ee0d95af4ae23c1a138b184b7f06e0b6abacabf1d0db41c90b03d834b"}, - {file = "fonttools-4.55.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:caf8230f3e10f8f5d7593eb6d252a37caf58c480b19a17e250a63dad63834cf3"}, - {file = "fonttools-4.55.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b586ab5b15b6097f2fb71cafa3c98edfd0dba1ad8027229e7b1e204a58b0e09d"}, - {file = "fonttools-4.55.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a8c2794ded89399cc2169c4d0bf7941247b8d5932b2659e09834adfbb01589aa"}, - {file = "fonttools-4.55.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:cf4fe7c124aa3f4e4c1940880156e13f2f4d98170d35c749e6b4f119a872551e"}, - {file = "fonttools-4.55.3-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:86721fbc389ef5cc1e2f477019e5069e8e4421e8d9576e9c26f840dbb04678de"}, - {file = "fonttools-4.55.3-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:89bdc5d88bdeec1b15af790810e267e8332d92561dce4f0748c2b95c9bdf3926"}, - {file = "fonttools-4.55.3-cp38-cp38-win32.whl", hash = "sha256:bc5dbb4685e51235ef487e4bd501ddfc49be5aede5e40f4cefcccabc6e60fb4b"}, - {file = "fonttools-4.55.3-cp38-cp38-win_amd64.whl", hash = "sha256:cd70de1a52a8ee2d1877b6293af8a2484ac82514f10b1c67c1c5762d38073e56"}, - {file = "fonttools-4.55.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:bdcc9f04b36c6c20978d3f060e5323a43f6222accc4e7fcbef3f428e216d96af"}, - {file = "fonttools-4.55.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c3ca99e0d460eff46e033cd3992a969658c3169ffcd533e0a39c63a38beb6831"}, - {file = "fonttools-4.55.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22f38464daa6cdb7b6aebd14ab06609328fe1e9705bb0fcc7d1e69de7109ee02"}, - {file = "fonttools-4.55.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ed63959d00b61959b035c7d47f9313c2c1ece090ff63afea702fe86de00dbed4"}, - {file = "fonttools-4.55.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:5e8d657cd7326eeaba27de2740e847c6b39dde2f8d7cd7cc56f6aad404ddf0bd"}, - {file = "fonttools-4.55.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:fb594b5a99943042c702c550d5494bdd7577f6ef19b0bc73877c948a63184a32"}, - {file = "fonttools-4.55.3-cp39-cp39-win32.whl", hash = "sha256:dc5294a3d5c84226e3dbba1b6f61d7ad813a8c0238fceea4e09aa04848c3d851"}, - {file = "fonttools-4.55.3-cp39-cp39-win_amd64.whl", hash = "sha256:aedbeb1db64496d098e6be92b2e63b5fac4e53b1b92032dfc6988e1ea9134a4d"}, - {file = "fonttools-4.55.3-py3-none-any.whl", hash = "sha256:f412604ccbeee81b091b420272841e5ec5ef68967a9790e80bffd0e30b8e2977"}, - {file = "fonttools-4.55.3.tar.gz", hash = "sha256:3983313c2a04d6cc1fe9251f8fc647754cf49a61dac6cb1e7249ae67afaafc45"}, + {file = "fonttools-4.55.6-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:57d55fc965e5dd20c8a60d880e0f43bafb506be87af0b650bdc42591e41e0d0d"}, + {file = "fonttools-4.55.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:127999618afe3a2490fad54bab0650c5fbeab1f8109bdc0205f6ad34306deb8b"}, + {file = "fonttools-4.55.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d3226d40cb92787e09dcc3730f54b3779dfe56bdfea624e263685ba17a6faac4"}, + {file = "fonttools-4.55.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e82772f70b84e17aa36e9f236feb2a4f73cb686ec1e162557a36cf759d1acd58"}, + {file = "fonttools-4.55.6-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a632f85bd73e002b771bcbcdc512038fa5d2e09bb18c03a22fb8d400ea492ddf"}, + {file = "fonttools-4.55.6-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:791e0cf862cdd3a252df395f1bb5f65e3a760f1da3c7ce184d0f7998c266614d"}, + {file = "fonttools-4.55.6-cp310-cp310-win32.whl", hash = "sha256:94f7f2c5c5f3a6422e954ecb6d37cc363e27d6f94050a7ed3f79f12157af6bb2"}, + {file = "fonttools-4.55.6-cp310-cp310-win_amd64.whl", hash = "sha256:2d15e02b93a46982a8513a208e8f89148bca8297640527365625be56151687d0"}, + {file = "fonttools-4.55.6-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0879f99eabbf2171dfadd9c8c75cec2b7b3aa9cd1f3955dd799c69d60a5189ef"}, + {file = "fonttools-4.55.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:d77d83ca77a4c3156a2f4cbc7f09f5a8503795da658fa255b987ad433a191266"}, + {file = "fonttools-4.55.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:07478132407736ee5e54f9f534e73923ae28e9bb6dba17764a35e3caf7d7fea3"}, + {file = "fonttools-4.55.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e1c06fbc2fd76b9bab03eddfd8aa9fb7c0981d314d780e763c80aa76be1c9982"}, + {file = "fonttools-4.55.6-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:09ed667c4753e1270994e5398cce8703e6423c41702a55b08f843b2907b1be65"}, + {file = "fonttools-4.55.6-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0ee6ed68af8d57764d69da099db163aaf37d62ba246cfd42f27590e3e6724b55"}, + {file = "fonttools-4.55.6-cp311-cp311-win32.whl", hash = "sha256:9f99e7876518b2d059a9cc67c506168aebf9c71ac8d81006d75e684222f291d2"}, + {file = "fonttools-4.55.6-cp311-cp311-win_amd64.whl", hash = "sha256:3aa6c684007723895aade9b2fe76d07008c9dc90fd1ef6c310b3ca9c8566729f"}, + {file = "fonttools-4.55.6-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:51120695ee13001533e50abd40eec32c01b9c6f44c5567db38a7acd3eedcd19d"}, + {file = "fonttools-4.55.6-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:76ac5a595f86892b49ba86ba2e46185adc76328ce6eff0583b30e5c3ab02a914"}, + {file = "fonttools-4.55.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b7535a5ac386e549e2b00b34c59b53f805e2423000676723b6867df3c10df04"}, + {file = "fonttools-4.55.6-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c42009177d3690894288082d5e3dac6bdc9f5d38e25054535e341a19cf5183a4"}, + {file = "fonttools-4.55.6-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:88f74bc19dbab3dee6a00ca67ca54bb4793e44ff0c4dcf1fa61d68651ae3fa0a"}, + {file = "fonttools-4.55.6-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:bc6f58976ffc19fe1630119a2736153b66151d023c6f30065f31c9e8baed1303"}, + {file = "fonttools-4.55.6-cp312-cp312-win32.whl", hash = "sha256:4259159715142c10b0f4d121ef14da3fa6eafc719289d9efa4b20c15e57fef82"}, + {file = "fonttools-4.55.6-cp312-cp312-win_amd64.whl", hash = "sha256:d91fce2e9a87cc0db9f8042281b6458f99854df810cfefab2baf6ab2acc0f4b4"}, + {file = "fonttools-4.55.6-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:9394813cc73fa22c5413ec1c5745c0a16f68dd2b890f7c55eaba5cb40187ed55"}, + {file = "fonttools-4.55.6-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ac817559a7d245454231374e194b4e457dca6fefa5b52af466ab0516e9a09c6e"}, + {file = "fonttools-4.55.6-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:34405f1314f1e88b1877a9f9e497fe45190e8c4b29a6c7cd85ed7f666a57d702"}, + {file = "fonttools-4.55.6-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af5469bbf555047efd8752d85faeb2a3510916ddc6c50dd6fb168edf1677408f"}, + {file = "fonttools-4.55.6-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:8a8004a19195eb8a8a13de69e26ec9ed60a5bc1fde336d0021b47995b368fac9"}, + {file = "fonttools-4.55.6-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:73a4aaf672e7b2265c6354a69cbbadf71b7f3133ecb74e98fec4c67c366698a3"}, + {file = "fonttools-4.55.6-cp313-cp313-win32.whl", hash = "sha256:73bdff9c44d36c57ea84766afc20517eda0c9bb1571b4a09876646264bd5ff3b"}, + {file = "fonttools-4.55.6-cp313-cp313-win_amd64.whl", hash = "sha256:132fa22be8a99784de8cb171b30425a581f04a40ec1c05183777fb2b1fe3bac9"}, + {file = 
"fonttools-4.55.6-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:8398928acb8a57073606feb9a310682d4a7e2d7536f2c61719261f4c0974504c"}, + {file = "fonttools-4.55.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c2f78ebfdef578d4db7c44bc207ac5f9a5c1f22c9db606460dcc8ad48e183338"}, + {file = "fonttools-4.55.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9fb545f3a4ebada908fa717ec732277de18dd10161f03ee3b3144d34477804de"}, + {file = "fonttools-4.55.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1062daa0390b32bfd062ded2b450db9e9cf10e5a9919561c13f535e818b1952b"}, + {file = "fonttools-4.55.6-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:860ab9ed3f9e088d3bdb77b9074e656635f173b039e77d550b603cba052a0dca"}, + {file = "fonttools-4.55.6-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:03701e7de70c71eb5965cb200986b0c11dfa3cf8e843e4f517ee30a0f43f0a25"}, + {file = "fonttools-4.55.6-cp38-cp38-win32.whl", hash = "sha256:f66561fbfb75785d06513b8025a50be37bf970c3c413e87581cc6eff10bc78f1"}, + {file = "fonttools-4.55.6-cp38-cp38-win_amd64.whl", hash = "sha256:edf159a8f1e48dc4683a715b36da76dd2f82954b16bfe11a215d58e963d31cfc"}, + {file = "fonttools-4.55.6-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:61aa1997c520bee4cde14ffabe81efc4708c500c8c81dce37831551627a2be56"}, + {file = "fonttools-4.55.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7954ea66a8d835f279c17d8474597a001ddd65a2c1ca97e223041bfbbe11f65e"}, + {file = "fonttools-4.55.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3f4e88f15f5ed4d2e4bdfcc98540bb3987ae25904f9be304be9a604e7a7050a1"}, + {file = "fonttools-4.55.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2d419483a6295e83cabddb56f1c7b7bfdc8169de2fcb5c68d622bd11140355f9"}, + {file = "fonttools-4.55.6-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:acc74884afddc2656bffc50100945ff407574538c152931c402fccddc46f0abc"}, + {file = "fonttools-4.55.6-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:a55489c7e9d5ea69690a2afad06723c3d0c48c6d276a25391ea97cb31a16b37c"}, + {file = "fonttools-4.55.6-cp39-cp39-win32.whl", hash = "sha256:8c9de8d16d02ecc8b65e3f3d2d1e3002be2c4a3f094d580faf76d7f768bd45fe"}, + {file = "fonttools-4.55.6-cp39-cp39-win_amd64.whl", hash = "sha256:471961af7a4b8461fac0c8ee044b4986e6fe3746d4c83a1aacbdd85b4eb53f93"}, + {file = "fonttools-4.55.6-py3-none-any.whl", hash = "sha256:d20ab5a78d0536c26628eaadba661e7ae2427b1e5c748a0a510a44d914e1b155"}, + {file = "fonttools-4.55.6.tar.gz", hash = "sha256:1beb4647a0df5ceaea48015656525eb8081af226fe96554089fd3b274d239ef0"}, ] [package.extras] @@ -1663,13 +1593,13 @@ files = [ [[package]] name = "google-api-core" -version = "2.24.0" +version = "2.24.1" description = "Google API client core library" optional = false python-versions = ">=3.7" files = [ - {file = "google_api_core-2.24.0-py3-none-any.whl", hash = "sha256:10d82ac0fca69c82a25b3efdeefccf6f28e02ebb97925a8cce8edbfe379929d9"}, - {file = "google_api_core-2.24.0.tar.gz", hash = "sha256:e255640547a597a4da010876d333208ddac417d60add22b6851a0c66a831fcaf"}, + {file = "google_api_core-2.24.1-py3-none-any.whl", hash = "sha256:bc78d608f5a5bf853b80bd70a795f703294de656c096c0968320830a4bc280f1"}, + {file = "google_api_core-2.24.1.tar.gz", hash = "sha256:f8b36f5456ab0dd99a1b693a40a31d1e7757beea380ad1b38faaf8941eae9d8a"}, ] [package.dependencies] @@ -1687,13 +1617,13 @@ grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] [[package]] name = "google-auth" -version = "2.37.0" 
+version = "2.38.0" description = "Google Authentication Library" optional = false python-versions = ">=3.7" files = [ - {file = "google_auth-2.37.0-py2.py3-none-any.whl", hash = "sha256:42664f18290a6be591be5329a96fe30184be1a1badb7292a7f686a9659de9ca0"}, - {file = "google_auth-2.37.0.tar.gz", hash = "sha256:0054623abf1f9c83492c63d3f47e77f0a544caa3d40b2d98e099a611c2dd5d00"}, + {file = "google_auth-2.38.0-py2.py3-none-any.whl", hash = "sha256:e7dae6694313f434a2727bf2906f27ad259bae090d7aa896590d86feec3d9d4a"}, + {file = "google_auth-2.38.0.tar.gz", hash = "sha256:8285113607d3b80a3f1543b75962447ba8a09fe85783432a784fdeef6ac094c4"}, ] [package.dependencies] @@ -1939,13 +1869,13 @@ tests = ["freezegun", "pytest", "pytest-cov"] [[package]] name = "identify" -version = "2.6.3" +version = "2.6.6" description = "File identification library for Python" optional = false python-versions = ">=3.9" files = [ - {file = "identify-2.6.3-py2.py3-none-any.whl", hash = "sha256:9edba65473324c2ea9684b1f944fe3191db3345e50b6d04571d10ed164f8d7bd"}, - {file = "identify-2.6.3.tar.gz", hash = "sha256:62f5dae9b5fef52c84cc188514e9ea4f3f636b1d8799ab5ebc475471f9e47a02"}, + {file = "identify-2.6.6-py2.py3-none-any.whl", hash = "sha256:cbd1810bce79f8b671ecb20f53ee0ae8e86ae84b557de31d89709dc2a48ba881"}, + {file = "identify-2.6.6.tar.gz", hash = "sha256:7bec12768ed44ea4761efb47806f0a41f86e7c0a5fdf5950d4648c90eca7e251"}, ] [package.extras] @@ -2393,13 +2323,13 @@ testing = ["coverage", "pyyaml"] [[package]] name = "marshmallow" -version = "3.23.2" +version = "3.26.0" description = "A lightweight library for converting complex datatypes to and from native Python datatypes." optional = false python-versions = ">=3.9" files = [ - {file = "marshmallow-3.23.2-py3-none-any.whl", hash = "sha256:bcaf2d6fd74fb1459f8450e85d994997ad3e70036452cbfa4ab685acb19479b3"}, - {file = "marshmallow-3.23.2.tar.gz", hash = "sha256:c448ac6455ca4d794773f00bae22c2f351d62d739929f761dce5eacb5c468d7f"}, + {file = "marshmallow-3.26.0-py3-none-any.whl", hash = "sha256:1287bca04e6a5f4094822ac153c03da5e214a0a60bcd557b140f3e66991b8ca1"}, + {file = "marshmallow-3.26.0.tar.gz", hash = "sha256:eb36762a1cc76d7abf831e18a3a1b26d3d481bbc74581b8e532a3d3a8115e1cb"}, ] [package.dependencies] @@ -2407,7 +2337,7 @@ packaging = ">=17.0" [package.extras] dev = ["marshmallow[tests]", "pre-commit (>=3.5,<5.0)", "tox"] -docs = ["alabaster (==1.0.0)", "autodocsumm (==0.2.14)", "sphinx (==8.1.3)", "sphinx-issues (==5.0.0)", "sphinx-version-warning (==1.1.2)"] +docs = ["autodocsumm (==0.2.14)", "furo (==2024.8.6)", "sphinx (==8.1.3)", "sphinx-copybutton (==0.5.2)", "sphinx-issues (==5.0.0)", "sphinxext-opengraph (==0.9.1)"] tests = ["pytest", "simplejson"] [[package]] @@ -2652,17 +2582,6 @@ files = [ {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, ] -[[package]] -name = "ndjson" -version = "0.3.1" -description = "JsonDecoder for ndjson" -optional = false -python-versions = "*" -files = [ - {file = "ndjson-0.3.1-py2.py3-none-any.whl", hash = "sha256:839c22275e6baa3040077b83c005ac24199b94973309a8a1809be962c753a410"}, - {file = "ndjson-0.3.1.tar.gz", hash = "sha256:bf9746cb6bb1cb53d172cda7f154c07c786d665ff28341e4e689b796b229e5d6"}, -] - [[package]] name = "nest-asyncio" version = "1.6.0" @@ -2826,31 +2745,6 @@ files = [ [package.extras] tests = ["pytest", "pytest-cov"] -[[package]] -name = "openpyxl" -version = "3.1.5" -description = "A Python library to read/write Excel 2010 xlsx/xlsm files" -optional = false 
-python-versions = ">=3.8" -files = [ - {file = "openpyxl-3.1.5-py2.py3-none-any.whl", hash = "sha256:5282c12b107bffeef825f4617dc029afaf41d0ea60823bbb665ef3079dc79de2"}, - {file = "openpyxl-3.1.5.tar.gz", hash = "sha256:cf0e3cf56142039133628b5acffe8ef0c12bc902d2aadd3e0fe5878dc08d1050"}, -] - -[package.dependencies] -et-xmlfile = "*" - -[[package]] -name = "orderly-set" -version = "5.2.3" -description = "Orderly set" -optional = false -python-versions = ">=3.8" -files = [ - {file = "orderly_set-5.2.3-py3-none-any.whl", hash = "sha256:d357cedcf67f4ebff0d4cbd5b0997e98eeb65dd24fdf5c990a501ae9e82c7d34"}, - {file = "orderly_set-5.2.3.tar.gz", hash = "sha256:571ed97c5a5fca7ddeb6b2d26c19aca896b0ed91f334d9c109edd2f265fb3017"}, -] - [[package]] name = "outcome" version = "1.3.0.post0" @@ -3122,13 +3016,13 @@ files = [ [[package]] name = "proto-plus" -version = "1.25.0" -description = "Beautiful, Pythonic protocol buffers." +version = "1.26.0" +description = "Beautiful, Pythonic protocol buffers" optional = false python-versions = ">=3.7" files = [ - {file = "proto_plus-1.25.0-py3-none-any.whl", hash = "sha256:c91fc4a65074ade8e458e95ef8bac34d4008daa7cce4a12d6707066fca648961"}, - {file = "proto_plus-1.25.0.tar.gz", hash = "sha256:fbb17f57f7bd05a68b7707e745e26528b0b3c34e378db91eef93912c54982d91"}, + {file = "proto_plus-1.26.0-py3-none-any.whl", hash = "sha256:bf2dfaa3da281fc3187d12d224c707cb57214fb2c22ba854eb0c105a3fb2d4d7"}, + {file = "proto_plus-1.26.0.tar.gz", hash = "sha256:6e93d5f5ca267b54300880fff156b6a3386b3fa3f43b1da62e680fc0c586ef22"}, ] [package.dependencies] @@ -3139,22 +3033,22 @@ testing = ["google-api-core (>=1.31.5)"] [[package]] name = "protobuf" -version = "4.25.5" +version = "4.25.6" description = "" optional = false python-versions = ">=3.8" files = [ - {file = "protobuf-4.25.5-cp310-abi3-win32.whl", hash = "sha256:5e61fd921603f58d2f5acb2806a929b4675f8874ff5f330b7d6f7e2e784bbcd8"}, - {file = "protobuf-4.25.5-cp310-abi3-win_amd64.whl", hash = "sha256:4be0571adcbe712b282a330c6e89eae24281344429ae95c6d85e79e84780f5ea"}, - {file = "protobuf-4.25.5-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:b2fde3d805354df675ea4c7c6338c1aecd254dfc9925e88c6d31a2bcb97eb173"}, - {file = "protobuf-4.25.5-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:919ad92d9b0310070f8356c24b855c98df2b8bd207ebc1c0c6fcc9ab1e007f3d"}, - {file = "protobuf-4.25.5-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:fe14e16c22be926d3abfcb500e60cab068baf10b542b8c858fa27e098123e331"}, - {file = "protobuf-4.25.5-cp38-cp38-win32.whl", hash = "sha256:98d8d8aa50de6a2747efd9cceba361c9034050ecce3e09136f90de37ddba66e1"}, - {file = "protobuf-4.25.5-cp38-cp38-win_amd64.whl", hash = "sha256:b0234dd5a03049e4ddd94b93400b67803c823cfc405689688f59b34e0742381a"}, - {file = "protobuf-4.25.5-cp39-cp39-win32.whl", hash = "sha256:abe32aad8561aa7cc94fc7ba4fdef646e576983edb94a73381b03c53728a626f"}, - {file = "protobuf-4.25.5-cp39-cp39-win_amd64.whl", hash = "sha256:7a183f592dc80aa7c8da7ad9e55091c4ffc9497b3054452d629bb85fa27c2a45"}, - {file = "protobuf-4.25.5-py3-none-any.whl", hash = "sha256:0aebecb809cae990f8129ada5ca273d9d670b76d9bfc9b1809f0a9c02b7dbf41"}, - {file = "protobuf-4.25.5.tar.gz", hash = "sha256:7f8249476b4a9473645db7f8ab42b02fe1488cbe5fb72fddd445e0665afd8584"}, + {file = "protobuf-4.25.6-cp310-abi3-win32.whl", hash = "sha256:61df6b5786e2b49fc0055f636c1e8f0aff263808bb724b95b164685ac1bcc13a"}, + {file = "protobuf-4.25.6-cp310-abi3-win_amd64.whl", hash = 
"sha256:b8f837bfb77513fe0e2f263250f423217a173b6d85135be4d81e96a4653bcd3c"}, + {file = "protobuf-4.25.6-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:6d4381f2417606d7e01750e2729fe6fbcda3f9883aa0c32b51d23012bded6c91"}, + {file = "protobuf-4.25.6-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:5dd800da412ba7f6f26d2c08868a5023ce624e1fdb28bccca2dc957191e81fb5"}, + {file = "protobuf-4.25.6-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:4434ff8bb5576f9e0c78f47c41cdf3a152c0b44de475784cd3fd170aef16205a"}, + {file = "protobuf-4.25.6-cp38-cp38-win32.whl", hash = "sha256:8bad0f9e8f83c1fbfcc34e573352b17dfce7d0519512df8519994168dc015d7d"}, + {file = "protobuf-4.25.6-cp38-cp38-win_amd64.whl", hash = "sha256:b6905b68cde3b8243a198268bb46fbec42b3455c88b6b02fb2529d2c306d18fc"}, + {file = "protobuf-4.25.6-cp39-cp39-win32.whl", hash = "sha256:3f3b0b39db04b509859361ac9bca65a265fe9342e6b9406eda58029f5b1d10b2"}, + {file = "protobuf-4.25.6-cp39-cp39-win_amd64.whl", hash = "sha256:6ef2045f89d4ad8d95fd43cd84621487832a61d15b49500e4c1350e8a0ef96be"}, + {file = "protobuf-4.25.6-py3-none-any.whl", hash = "sha256:07972021c8e30b870cfc0863409d033af940213e0e7f64e27fe017b929d2c9f7"}, + {file = "protobuf-4.25.6.tar.gz", hash = "sha256:f8cfbae7c5afd0d0eaccbe73267339bff605a2315860bb1ba08eb66670a9a91f"}, ] [[package]] @@ -3239,7 +3133,6 @@ files = [ {file = "psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:bb89f0a835bcfc1d42ccd5f41f04870c1b936d8507c6df12b7737febc40f0909"}, {file = "psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:f0c2d907a1e102526dd2986df638343388b94c33860ff3bbe1384130828714b1"}, {file = "psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f8157bed2f51db683f31306aa497311b560f2265998122abe1dce6428bd86567"}, - {file = "psycopg2_binary-2.9.10-cp313-cp313-win_amd64.whl", hash = "sha256:27422aa5f11fbcd9b18da48373eb67081243662f9b46e6fd07c3eb46e4535142"}, {file = "psycopg2_binary-2.9.10-cp38-cp38-macosx_12_0_x86_64.whl", hash = "sha256:eb09aa7f9cecb45027683bb55aebaaf45a0df8bf6de68801a6afdc7947bb09d4"}, {file = "psycopg2_binary-2.9.10-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b73d6d7f0ccdad7bc43e6d34273f70d587ef62f824d7261c4ae9b8b1b6af90e8"}, {file = "psycopg2_binary-2.9.10-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ce5ab4bf46a211a8e924d307c1b1fcda82368586a19d0a24f8ae166f5c784864"}, @@ -3300,13 +3193,13 @@ files = [ [[package]] name = "pydantic" -version = "2.10.5" +version = "2.10.6" description = "Data validation using Python type hints" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic-2.10.5-py3-none-any.whl", hash = "sha256:4dd4e322dbe55472cb7ca7e73f4b63574eecccf2835ffa2af9021ce113c83c53"}, - {file = "pydantic-2.10.5.tar.gz", hash = "sha256:278b38dbbaec562011d659ee05f63346951b3a248a6f3642e1bc68894ea2b4ff"}, + {file = "pydantic-2.10.6-py3-none-any.whl", hash = "sha256:427d664bf0b8a2b34ff5dd0f5a18df00591adcee7198fbd71981054cef37b584"}, + {file = "pydantic-2.10.6.tar.gz", hash = "sha256:ca5daa827cce33de7a42be142548b0096bf05a7e7b365aebfa5f8eeec7128236"}, ] [package.dependencies] @@ -3484,13 +3377,13 @@ files = [ [[package]] name = "pyparsing" -version = "3.2.0" +version = "3.2.1" description = "pyparsing module - Classes and methods to define and execute parsing grammars" optional = false python-versions = ">=3.9" files = [ - {file = "pyparsing-3.2.0-py3-none-any.whl", hash = 
"sha256:93d9577b88da0bbea8cc8334ee8b918ed014968fd2ec383e868fb8afb1ccef84"}, - {file = "pyparsing-3.2.0.tar.gz", hash = "sha256:cbf74e27246d595d9a74b186b810f6fbb86726dbf3b9532efb343f6d7294fe9c"}, + {file = "pyparsing-3.2.1-py3-none-any.whl", hash = "sha256:506ff4f4386c4cec0590ec19e6302d3aedb992fdc02c761e90416f158dacf8e1"}, + {file = "pyparsing-3.2.1.tar.gz", hash = "sha256:61980854fd66de3a90028d679a954d5f2623e83144b5afe5ee86f43d762e5f0a"}, ] [package.extras] @@ -3498,13 +3391,13 @@ diagrams = ["jinja2", "railroad-diagrams"] [[package]] name = "pypdf" -version = "5.1.0" +version = "5.2.0" description = "A pure-python PDF library capable of splitting, merging, cropping, and transforming PDF files" optional = false python-versions = ">=3.8" files = [ - {file = "pypdf-5.1.0-py3-none-any.whl", hash = "sha256:3bd4f503f4ebc58bae40d81e81a9176c400cbbac2ba2d877367595fb524dfdfc"}, - {file = "pypdf-5.1.0.tar.gz", hash = "sha256:425a129abb1614183fd1aca6982f650b47f8026867c0ce7c4b9f281c443d2740"}, + {file = "pypdf-5.2.0-py3-none-any.whl", hash = "sha256:d107962ec45e65e3bd10c1d9242bdbbedaa38193c9e3a6617bd6d996e5747b19"}, + {file = "pypdf-5.2.0.tar.gz", hash = "sha256:7c38e68420f038f2c4998fd9d6717b6db4f6cef1642e9cf384d519c9cf094663"}, ] [package.extras] @@ -3578,17 +3471,17 @@ files = [ [[package]] name = "python-iso639" -version = "2024.10.22" +version = "2025.1.28" description = "ISO 639 language codes, names, and other associated information" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "python_iso639-2024.10.22-py3-none-any.whl", hash = "sha256:02d3ce2e01c6896b30b9cbbd3e1c8ee0d7221250b5d63ea9803e0d2a81fd1047"}, - {file = "python_iso639-2024.10.22.tar.gz", hash = "sha256:750f21b6a0bc6baa24253a3d8aae92b582bf93aa40988361cd96852c2c6d9a52"}, + {file = "python_iso639-2025.1.28-py3-none-any.whl", hash = "sha256:66bcd88838727bc8ed1dfc9b5a354a27ed5c4bab8322473da81071fae265228b"}, + {file = "python_iso639-2025.1.28.tar.gz", hash = "sha256:42b18bf52ad6ce5882c0e4acec9ae528310c7ef2966b776fc49d154c654580c5"}, ] [package.extras] -dev = ["black (==24.10.0)", "build (==1.2.1)", "flake8 (==7.1.1)", "pytest (==8.3.3)", "requests (==2.32.3)", "twine (==5.1.1)"] +dev = ["black (==24.10.0)", "build (==1.2.2)", "flake8 (==7.1.1)", "mypy (==1.14.1)", "pytest (==8.3.4)", "requests (==2.32.3)", "twine (==6.1.0)"] [[package]] name = "python-magic" @@ -3830,6 +3723,9 @@ files = [ {file = "redis-5.2.1.tar.gz", hash = "sha256:16f2e22dff21d5125e8481515e386711a34cbec50f0e44413dd7d9c060a54e0f"}, ] +[package.dependencies] +async-timeout = {version = ">=4.0.3", markers = "python_full_version < \"3.11.3\""} + [package.extras] hiredis = ["hiredis (>=3.0.0)"] ocsp = ["cryptography (>=36.0.1)", "pyopenssl (==23.2.1)", "requests (>=2.31.0)"] @@ -4458,13 +4354,13 @@ typing-extensions = ">=3.7.4" [[package]] name = "tzdata" -version = "2024.2" +version = "2025.1" description = "Provider of IANA time zone data" optional = false python-versions = ">=2" files = [ - {file = "tzdata-2024.2-py2.py3-none-any.whl", hash = "sha256:a48093786cdcde33cad18c2555e8532f34422074448fbc874186f0abd79565cd"}, - {file = "tzdata-2024.2.tar.gz", hash = "sha256:7d85cc416e9382e69095b7bdf4afd9e3880418a2413feec7069d533d6b4e31cc"}, + {file = "tzdata-2025.1-py2.py3-none-any.whl", hash = "sha256:7e127113816800496f027041c570f50bcd464a020098a3b6b199517772303639"}, + {file = "tzdata-2025.1.tar.gz", hash = "sha256:24894909e88cdb28bd1636c6887801df64cb485bd593f2fd83ef29075a81d694"}, ] [[package]] @@ -4496,26 +4392,15 @@ files = [ {file 
= "ua_parser_builtins-0.18.0.post1-py3-none-any.whl", hash = "sha256:eb4f93504040c3a990a6b0742a2afd540d87d7f9f05fd66e94c101db1564674d"}, ] -[[package]] -name = "unidecode" -version = "1.3.8" -description = "ASCII transliterations of Unicode text" -optional = false -python-versions = ">=3.5" -files = [ - {file = "Unidecode-1.3.8-py3-none-any.whl", hash = "sha256:d130a61ce6696f8148a3bd8fe779c99adeb4b870584eeb9526584e9aa091fd39"}, - {file = "Unidecode-1.3.8.tar.gz", hash = "sha256:cfdb349d46ed3873ece4586b96aa75258726e2fa8ec21d6f00a591d98806c2f4"}, -] - [[package]] name = "unstructured" -version = "0.16.15" +version = "0.16.16" description = "A library that prepares raw documents for downstream ML tasks." optional = false python-versions = "<3.13,>=3.9.0" files = [ - {file = "unstructured-0.16.15-py3-none-any.whl", hash = "sha256:5b0931eb92fb858b983fada18111efdf9c2a0c861ef8e9b58c4e05b1daa50e35"}, - {file = "unstructured-0.16.15.tar.gz", hash = "sha256:18fb850d47b5a2a6ea45b2f7e0eda687f903a2f2e58909b1defd48e2b3126ff4"}, + {file = "unstructured-0.16.16-py3-none-any.whl", hash = "sha256:832cacd62a3258b0d6bd0fbedd1233d75f7855ef8cf062b8f02466a0d0396332"}, + {file = "unstructured-0.16.16.tar.gz", hash = "sha256:0147dc3b53f7f0888ed608c9c454b5fca1a41925a09b7884c121239a60c9d8fa"}, ] [package.dependencies] @@ -4528,7 +4413,6 @@ filetype = "*" html5lib = "*" langdetect = "*" lxml = "*" -ndjson = "*" nltk = "*" numpy = "<2" psutil = "*" @@ -4565,39 +4449,27 @@ xlsx = ["networkx", "openpyxl", "pandas", "xlrd"] [[package]] name = "unstructured-client" -version = "0.25.9" +version = "0.29.0" description = "Python Client SDK for Unstructured API" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "unstructured-client-0.25.9.tar.gz", hash = "sha256:fcc461623f58fefb0e22508e28bf653a8f6934b9779cb4a90dd68d77a39fb5b2"}, - {file = "unstructured_client-0.25.9-py3-none-any.whl", hash = "sha256:c984c01878c8fc243be7c842467d1113a194d885ab6396ae74258ee42717c5b5"}, + {file = "unstructured_client-0.29.0-py3-none-any.whl", hash = "sha256:a4a1c9a55835030e38c37d941a5dd8684233779f477aeb52619ac47c2d488d85"}, + {file = "unstructured_client-0.29.0.tar.gz", hash = "sha256:a0bd625f067a1e015d80800f1795cfb4e2e2ee980f0c25d2eabe506e66c38018"}, ] [package.dependencies] -certifi = ">=2023.7.22" -charset-normalizer = ">=3.2.0" +aiofiles = ">=24.1.0" cryptography = ">=3.1" -dataclasses-json = ">=0.6.4" -deepdiff = ">=6.0" +eval-type-backport = ">=0.2.0,<0.3.0" httpx = ">=0.27.0" -idna = ">=3.4" -jsonpath-python = ">=1.0.6" -marshmallow = ">=3.19.0" -mypy-extensions = ">=1.0.0" +jsonpath-python = ">=1.0.6,<2.0.0" nest-asyncio = ">=1.6.0" -packaging = ">=23.1" +pydantic = ">=2.10.3,<2.11.0" pypdf = ">=4.0" -python-dateutil = ">=2.8.2" -requests = ">=2.31.0" +python-dateutil = ">=2.8.2,<3.0.0" requests-toolbelt = ">=1.0.0" -six = ">=1.16.0" -typing-extensions = ">=4.7.1" -typing-inspect = ">=0.9.0" -urllib3 = ">=1.26.18" - -[package.extras] -dev = ["pylint (==3.1.0)"] +typing-inspect = ">=0.9.0,<0.10.0" [[package]] name = "uritemplate" @@ -4664,13 +4536,13 @@ standard = ["colorama (>=0.4)", "httptools (>=0.6.3)", "python-dotenv (>=0.13)", [[package]] name = "virtualenv" -version = "20.28.0" +version = "20.29.1" description = "Virtual Python Environment builder" optional = false python-versions = ">=3.8" files = [ - {file = "virtualenv-20.28.0-py3-none-any.whl", hash = "sha256:23eae1b4516ecd610481eda647f3a7c09aea295055337331bb4e6892ecce47b0"}, - {file = "virtualenv-20.28.0.tar.gz", hash = 
"sha256:2c9c3262bb8e7b87ea801d715fae4495e6032450c71d2309be9550e7364049aa"}, + {file = "virtualenv-20.29.1-py3-none-any.whl", hash = "sha256:4e4cb403c0b0da39e13b46b1b2476e505cb0046b25f242bee80f62bf990b2779"}, + {file = "virtualenv-20.29.1.tar.gz", hash = "sha256:b8b8970138d32fb606192cb97f6cd4bb644fa486be9308fb9b63f81091b5dc35"}, ] [package.dependencies] @@ -4836,76 +4708,90 @@ brotli = ["brotli"] [[package]] name = "wrapt" -version = "1.17.0" +version = "1.17.2" description = "Module for decorators, wrappers and monkey patching." optional = false python-versions = ">=3.8" files = [ - {file = "wrapt-1.17.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2a0c23b8319848426f305f9cb0c98a6e32ee68a36264f45948ccf8e7d2b941f8"}, - {file = "wrapt-1.17.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b1ca5f060e205f72bec57faae5bd817a1560fcfc4af03f414b08fa29106b7e2d"}, - {file = "wrapt-1.17.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e185ec6060e301a7e5f8461c86fb3640a7beb1a0f0208ffde7a65ec4074931df"}, - {file = "wrapt-1.17.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bb90765dd91aed05b53cd7a87bd7f5c188fcd95960914bae0d32c5e7f899719d"}, - {file = "wrapt-1.17.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:879591c2b5ab0a7184258274c42a126b74a2c3d5a329df16d69f9cee07bba6ea"}, - {file = "wrapt-1.17.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:fce6fee67c318fdfb7f285c29a82d84782ae2579c0e1b385b7f36c6e8074fffb"}, - {file = "wrapt-1.17.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:0698d3a86f68abc894d537887b9bbf84d29bcfbc759e23f4644be27acf6da301"}, - {file = "wrapt-1.17.0-cp310-cp310-win32.whl", hash = "sha256:69d093792dc34a9c4c8a70e4973a3361c7a7578e9cd86961b2bbf38ca71e4e22"}, - {file = "wrapt-1.17.0-cp310-cp310-win_amd64.whl", hash = "sha256:f28b29dc158ca5d6ac396c8e0a2ef45c4e97bb7e65522bfc04c989e6fe814575"}, - {file = "wrapt-1.17.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:74bf625b1b4caaa7bad51d9003f8b07a468a704e0644a700e936c357c17dd45a"}, - {file = "wrapt-1.17.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0f2a28eb35cf99d5f5bd12f5dd44a0f41d206db226535b37b0c60e9da162c3ed"}, - {file = "wrapt-1.17.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:81b1289e99cf4bad07c23393ab447e5e96db0ab50974a280f7954b071d41b489"}, - {file = "wrapt-1.17.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f2939cd4a2a52ca32bc0b359015718472d7f6de870760342e7ba295be9ebaf9"}, - {file = "wrapt-1.17.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6a9653131bda68a1f029c52157fd81e11f07d485df55410401f745007bd6d339"}, - {file = "wrapt-1.17.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4e4b4385363de9052dac1a67bfb535c376f3d19c238b5f36bddc95efae15e12d"}, - {file = "wrapt-1.17.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bdf62d25234290db1837875d4dceb2151e4ea7f9fff2ed41c0fde23ed542eb5b"}, - {file = "wrapt-1.17.0-cp311-cp311-win32.whl", hash = "sha256:5d8fd17635b262448ab8f99230fe4dac991af1dabdbb92f7a70a6afac8a7e346"}, - {file = "wrapt-1.17.0-cp311-cp311-win_amd64.whl", hash = "sha256:92a3d214d5e53cb1db8b015f30d544bc9d3f7179a05feb8f16df713cecc2620a"}, - {file = "wrapt-1.17.0-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:89fc28495896097622c3fc238915c79365dd0ede02f9a82ce436b13bd0ab7569"}, - {file = "wrapt-1.17.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:875d240fdbdbe9e11f9831901fb8719da0bd4e6131f83aa9f69b96d18fae7504"}, - {file = "wrapt-1.17.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e5ed16d95fd142e9c72b6c10b06514ad30e846a0d0917ab406186541fe68b451"}, - {file = "wrapt-1.17.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18b956061b8db634120b58f668592a772e87e2e78bc1f6a906cfcaa0cc7991c1"}, - {file = "wrapt-1.17.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:daba396199399ccabafbfc509037ac635a6bc18510ad1add8fd16d4739cdd106"}, - {file = "wrapt-1.17.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:4d63f4d446e10ad19ed01188d6c1e1bb134cde8c18b0aa2acfd973d41fcc5ada"}, - {file = "wrapt-1.17.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:8a5e7cc39a45fc430af1aefc4d77ee6bad72c5bcdb1322cfde852c15192b8bd4"}, - {file = "wrapt-1.17.0-cp312-cp312-win32.whl", hash = "sha256:0a0a1a1ec28b641f2a3a2c35cbe86c00051c04fffcfcc577ffcdd707df3f8635"}, - {file = "wrapt-1.17.0-cp312-cp312-win_amd64.whl", hash = "sha256:3c34f6896a01b84bab196f7119770fd8466c8ae3dfa73c59c0bb281e7b588ce7"}, - {file = "wrapt-1.17.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:714c12485aa52efbc0fc0ade1e9ab3a70343db82627f90f2ecbc898fdf0bb181"}, - {file = "wrapt-1.17.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da427d311782324a376cacb47c1a4adc43f99fd9d996ffc1b3e8529c4074d393"}, - {file = "wrapt-1.17.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ba1739fb38441a27a676f4de4123d3e858e494fac05868b7a281c0a383c098f4"}, - {file = "wrapt-1.17.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e711fc1acc7468463bc084d1b68561e40d1eaa135d8c509a65dd534403d83d7b"}, - {file = "wrapt-1.17.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:140ea00c87fafc42739bd74a94a5a9003f8e72c27c47cd4f61d8e05e6dec8721"}, - {file = "wrapt-1.17.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:73a96fd11d2b2e77d623a7f26e004cc31f131a365add1ce1ce9a19e55a1eef90"}, - {file = "wrapt-1.17.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:0b48554952f0f387984da81ccfa73b62e52817a4386d070c75e4db7d43a28c4a"}, - {file = "wrapt-1.17.0-cp313-cp313-win32.whl", hash = "sha256:498fec8da10e3e62edd1e7368f4b24aa362ac0ad931e678332d1b209aec93045"}, - {file = "wrapt-1.17.0-cp313-cp313-win_amd64.whl", hash = "sha256:fd136bb85f4568fffca995bd3c8d52080b1e5b225dbf1c2b17b66b4c5fa02838"}, - {file = "wrapt-1.17.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:17fcf043d0b4724858f25b8826c36e08f9fb2e475410bece0ec44a22d533da9b"}, - {file = "wrapt-1.17.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4a557d97f12813dc5e18dad9fa765ae44ddd56a672bb5de4825527c847d6379"}, - {file = "wrapt-1.17.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0229b247b0fc7dee0d36176cbb79dbaf2a9eb7ecc50ec3121f40ef443155fb1d"}, - {file = "wrapt-1.17.0-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8425cfce27b8b20c9b89d77fb50e368d8306a90bf2b6eef2cdf5cd5083adf83f"}, - {file = 
"wrapt-1.17.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9c900108df470060174108012de06d45f514aa4ec21a191e7ab42988ff42a86c"}, - {file = "wrapt-1.17.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:4e547b447073fc0dbfcbff15154c1be8823d10dab4ad401bdb1575e3fdedff1b"}, - {file = "wrapt-1.17.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:914f66f3b6fc7b915d46c1cc424bc2441841083de01b90f9e81109c9759e43ab"}, - {file = "wrapt-1.17.0-cp313-cp313t-win32.whl", hash = "sha256:a4192b45dff127c7d69b3bdfb4d3e47b64179a0b9900b6351859f3001397dabf"}, - {file = "wrapt-1.17.0-cp313-cp313t-win_amd64.whl", hash = "sha256:4f643df3d4419ea3f856c5c3f40fec1d65ea2e89ec812c83f7767c8730f9827a"}, - {file = "wrapt-1.17.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:69c40d4655e078ede067a7095544bcec5a963566e17503e75a3a3e0fe2803b13"}, - {file = "wrapt-1.17.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f495b6754358979379f84534f8dd7a43ff8cff2558dcdea4a148a6e713a758f"}, - {file = "wrapt-1.17.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:baa7ef4e0886a6f482e00d1d5bcd37c201b383f1d314643dfb0367169f94f04c"}, - {file = "wrapt-1.17.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8fc931382e56627ec4acb01e09ce66e5c03c384ca52606111cee50d931a342d"}, - {file = "wrapt-1.17.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:8f8909cdb9f1b237786c09a810e24ee5e15ef17019f7cecb207ce205b9b5fcce"}, - {file = "wrapt-1.17.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:ad47b095f0bdc5585bced35bd088cbfe4177236c7df9984b3cc46b391cc60627"}, - {file = "wrapt-1.17.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:948a9bd0fb2c5120457b07e59c8d7210cbc8703243225dbd78f4dfc13c8d2d1f"}, - {file = "wrapt-1.17.0-cp38-cp38-win32.whl", hash = "sha256:5ae271862b2142f4bc687bdbfcc942e2473a89999a54231aa1c2c676e28f29ea"}, - {file = "wrapt-1.17.0-cp38-cp38-win_amd64.whl", hash = "sha256:f335579a1b485c834849e9075191c9898e0731af45705c2ebf70e0cd5d58beed"}, - {file = "wrapt-1.17.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d751300b94e35b6016d4b1e7d0e7bbc3b5e1751e2405ef908316c2a9024008a1"}, - {file = "wrapt-1.17.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7264cbb4a18dc4acfd73b63e4bcfec9c9802614572025bdd44d0721983fc1d9c"}, - {file = "wrapt-1.17.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:33539c6f5b96cf0b1105a0ff4cf5db9332e773bb521cc804a90e58dc49b10578"}, - {file = "wrapt-1.17.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c30970bdee1cad6a8da2044febd824ef6dc4cc0b19e39af3085c763fdec7de33"}, - {file = "wrapt-1.17.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:bc7f729a72b16ee21795a943f85c6244971724819819a41ddbaeb691b2dd85ad"}, - {file = "wrapt-1.17.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:6ff02a91c4fc9b6a94e1c9c20f62ea06a7e375f42fe57587f004d1078ac86ca9"}, - {file = "wrapt-1.17.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:2dfb7cff84e72e7bf975b06b4989477873dcf160b2fd89959c629535df53d4e0"}, - {file = "wrapt-1.17.0-cp39-cp39-win32.whl", hash = "sha256:2399408ac33ffd5b200480ee858baa58d77dd30e0dd0cab6a8a9547135f30a88"}, - {file = "wrapt-1.17.0-cp39-cp39-win_amd64.whl", hash = "sha256:4f763a29ee6a20c529496a20a7bcb16a73de27f5da6a843249c7047daf135977"}, - {file = "wrapt-1.17.0-py3-none-any.whl", hash 
= "sha256:d2c63b93548eda58abf5188e505ffed0229bf675f7c3090f8e36ad55b8cbc371"}, - {file = "wrapt-1.17.0.tar.gz", hash = "sha256:16187aa2317c731170a88ef35e8937ae0f533c402872c1ee5e6d079fcf320801"}, + {file = "wrapt-1.17.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3d57c572081fed831ad2d26fd430d565b76aa277ed1d30ff4d40670b1c0dd984"}, + {file = "wrapt-1.17.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b5e251054542ae57ac7f3fba5d10bfff615b6c2fb09abeb37d2f1463f841ae22"}, + {file = "wrapt-1.17.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:80dd7db6a7cb57ffbc279c4394246414ec99537ae81ffd702443335a61dbf3a7"}, + {file = "wrapt-1.17.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a6e821770cf99cc586d33833b2ff32faebdbe886bd6322395606cf55153246c"}, + {file = "wrapt-1.17.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b60fb58b90c6d63779cb0c0c54eeb38941bae3ecf7a73c764c52c88c2dcb9d72"}, + {file = "wrapt-1.17.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b870b5df5b71d8c3359d21be8f0d6c485fa0ebdb6477dda51a1ea54a9b558061"}, + {file = "wrapt-1.17.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:4011d137b9955791f9084749cba9a367c68d50ab8d11d64c50ba1688c9b457f2"}, + {file = "wrapt-1.17.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:1473400e5b2733e58b396a04eb7f35f541e1fb976d0c0724d0223dd607e0f74c"}, + {file = "wrapt-1.17.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3cedbfa9c940fdad3e6e941db7138e26ce8aad38ab5fe9dcfadfed9db7a54e62"}, + {file = "wrapt-1.17.2-cp310-cp310-win32.whl", hash = "sha256:582530701bff1dec6779efa00c516496968edd851fba224fbd86e46cc6b73563"}, + {file = "wrapt-1.17.2-cp310-cp310-win_amd64.whl", hash = "sha256:58705da316756681ad3c9c73fd15499aa4d8c69f9fd38dc8a35e06c12468582f"}, + {file = "wrapt-1.17.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ff04ef6eec3eee8a5efef2401495967a916feaa353643defcc03fc74fe213b58"}, + {file = "wrapt-1.17.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4db983e7bca53819efdbd64590ee96c9213894272c776966ca6306b73e4affda"}, + {file = "wrapt-1.17.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9abc77a4ce4c6f2a3168ff34b1da9b0f311a8f1cfd694ec96b0603dff1c79438"}, + {file = "wrapt-1.17.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b929ac182f5ace000d459c59c2c9c33047e20e935f8e39371fa6e3b85d56f4a"}, + {file = "wrapt-1.17.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f09b286faeff3c750a879d336fb6d8713206fc97af3adc14def0cdd349df6000"}, + {file = "wrapt-1.17.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a7ed2d9d039bd41e889f6fb9364554052ca21ce823580f6a07c4ec245c1f5d6"}, + {file = "wrapt-1.17.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:129a150f5c445165ff941fc02ee27df65940fcb8a22a61828b1853c98763a64b"}, + {file = "wrapt-1.17.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:1fb5699e4464afe5c7e65fa51d4f99e0b2eadcc176e4aa33600a3df7801d6662"}, + {file = "wrapt-1.17.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9a2bce789a5ea90e51a02dfcc39e31b7f1e662bc3317979aa7e5538e3a034f72"}, + {file = "wrapt-1.17.2-cp311-cp311-win32.whl", hash = "sha256:4afd5814270fdf6380616b321fd31435a462019d834f83c8611a0ce7484c7317"}, + {file = "wrapt-1.17.2-cp311-cp311-win_amd64.whl", hash = 
"sha256:acc130bc0375999da18e3d19e5a86403667ac0c4042a094fefb7eec8ebac7cf3"}, + {file = "wrapt-1.17.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:d5e2439eecc762cd85e7bd37161d4714aa03a33c5ba884e26c81559817ca0925"}, + {file = "wrapt-1.17.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:3fc7cb4c1c744f8c05cd5f9438a3caa6ab94ce8344e952d7c45a8ed59dd88392"}, + {file = "wrapt-1.17.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8fdbdb757d5390f7c675e558fd3186d590973244fab0c5fe63d373ade3e99d40"}, + {file = "wrapt-1.17.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5bb1d0dbf99411f3d871deb6faa9aabb9d4e744d67dcaaa05399af89d847a91d"}, + {file = "wrapt-1.17.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d18a4865f46b8579d44e4fe1e2bcbc6472ad83d98e22a26c963d46e4c125ef0b"}, + {file = "wrapt-1.17.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc570b5f14a79734437cb7b0500376b6b791153314986074486e0b0fa8d71d98"}, + {file = "wrapt-1.17.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6d9187b01bebc3875bac9b087948a2bccefe464a7d8f627cf6e48b1bbae30f82"}, + {file = "wrapt-1.17.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:9e8659775f1adf02eb1e6f109751268e493c73716ca5761f8acb695e52a756ae"}, + {file = "wrapt-1.17.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e8b2816ebef96d83657b56306152a93909a83f23994f4b30ad4573b00bd11bb9"}, + {file = "wrapt-1.17.2-cp312-cp312-win32.whl", hash = "sha256:468090021f391fe0056ad3e807e3d9034e0fd01adcd3bdfba977b6fdf4213ea9"}, + {file = "wrapt-1.17.2-cp312-cp312-win_amd64.whl", hash = "sha256:ec89ed91f2fa8e3f52ae53cd3cf640d6feff92ba90d62236a81e4e563ac0e991"}, + {file = "wrapt-1.17.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:6ed6ffac43aecfe6d86ec5b74b06a5be33d5bb9243d055141e8cabb12aa08125"}, + {file = "wrapt-1.17.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:35621ae4c00e056adb0009f8e86e28eb4a41a4bfa8f9bfa9fca7d343fe94f998"}, + {file = "wrapt-1.17.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a604bf7a053f8362d27eb9fefd2097f82600b856d5abe996d623babd067b1ab5"}, + {file = "wrapt-1.17.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5cbabee4f083b6b4cd282f5b817a867cf0b1028c54d445b7ec7cfe6505057cf8"}, + {file = "wrapt-1.17.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:49703ce2ddc220df165bd2962f8e03b84c89fee2d65e1c24a7defff6f988f4d6"}, + {file = "wrapt-1.17.2-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8112e52c5822fc4253f3901b676c55ddf288614dc7011634e2719718eaa187dc"}, + {file = "wrapt-1.17.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:9fee687dce376205d9a494e9c121e27183b2a3df18037f89d69bd7b35bcf59e2"}, + {file = "wrapt-1.17.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:18983c537e04d11cf027fbb60a1e8dfd5190e2b60cc27bc0808e653e7b218d1b"}, + {file = "wrapt-1.17.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:703919b1633412ab54bcf920ab388735832fdcb9f9a00ae49387f0fe67dad504"}, + {file = "wrapt-1.17.2-cp313-cp313-win32.whl", hash = "sha256:abbb9e76177c35d4e8568e58650aa6926040d6a9f6f03435b7a522bf1c487f9a"}, + {file = "wrapt-1.17.2-cp313-cp313-win_amd64.whl", hash = "sha256:69606d7bb691b50a4240ce6b22ebb319c1cfb164e5f6569835058196e0f3a845"}, + {file = 
"wrapt-1.17.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:4a721d3c943dae44f8e243b380cb645a709ba5bd35d3ad27bc2ed947e9c68192"}, + {file = "wrapt-1.17.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:766d8bbefcb9e00c3ac3b000d9acc51f1b399513f44d77dfe0eb026ad7c9a19b"}, + {file = "wrapt-1.17.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:e496a8ce2c256da1eb98bd15803a79bee00fc351f5dfb9ea82594a3f058309e0"}, + {file = "wrapt-1.17.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40d615e4fe22f4ad3528448c193b218e077656ca9ccb22ce2cb20db730f8d306"}, + {file = "wrapt-1.17.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a5aaeff38654462bc4b09023918b7f21790efb807f54c000a39d41d69cf552cb"}, + {file = "wrapt-1.17.2-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a7d15bbd2bc99e92e39f49a04653062ee6085c0e18b3b7512a4f2fe91f2d681"}, + {file = "wrapt-1.17.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:e3890b508a23299083e065f435a492b5435eba6e304a7114d2f919d400888cc6"}, + {file = "wrapt-1.17.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:8c8b293cd65ad716d13d8dd3624e42e5a19cc2a2f1acc74b30c2c13f15cb61a6"}, + {file = "wrapt-1.17.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:4c82b8785d98cdd9fed4cac84d765d234ed3251bd6afe34cb7ac523cb93e8b4f"}, + {file = "wrapt-1.17.2-cp313-cp313t-win32.whl", hash = "sha256:13e6afb7fe71fe7485a4550a8844cc9ffbe263c0f1a1eea569bc7091d4898555"}, + {file = "wrapt-1.17.2-cp313-cp313t-win_amd64.whl", hash = "sha256:eaf675418ed6b3b31c7a989fd007fa7c3be66ce14e5c3b27336383604c9da85c"}, + {file = "wrapt-1.17.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5c803c401ea1c1c18de70a06a6f79fcc9c5acfc79133e9869e730ad7f8ad8ef9"}, + {file = "wrapt-1.17.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f917c1180fdb8623c2b75a99192f4025e412597c50b2ac870f156de8fb101119"}, + {file = "wrapt-1.17.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ecc840861360ba9d176d413a5489b9a0aff6d6303d7e733e2c4623cfa26904a6"}, + {file = "wrapt-1.17.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb87745b2e6dc56361bfde481d5a378dc314b252a98d7dd19a651a3fa58f24a9"}, + {file = "wrapt-1.17.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:58455b79ec2661c3600e65c0a716955adc2410f7383755d537584b0de41b1d8a"}, + {file = "wrapt-1.17.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b4e42a40a5e164cbfdb7b386c966a588b1047558a990981ace551ed7e12ca9c2"}, + {file = "wrapt-1.17.2-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:91bd7d1773e64019f9288b7a5101f3ae50d3d8e6b1de7edee9c2ccc1d32f0c0a"}, + {file = "wrapt-1.17.2-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:bb90fb8bda722a1b9d48ac1e6c38f923ea757b3baf8ebd0c82e09c5c1a0e7a04"}, + {file = "wrapt-1.17.2-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:08e7ce672e35efa54c5024936e559469436f8b8096253404faeb54d2a878416f"}, + {file = "wrapt-1.17.2-cp38-cp38-win32.whl", hash = "sha256:410a92fefd2e0e10d26210e1dfb4a876ddaf8439ef60d6434f21ef8d87efc5b7"}, + {file = "wrapt-1.17.2-cp38-cp38-win_amd64.whl", hash = "sha256:95c658736ec15602da0ed73f312d410117723914a5c91a14ee4cdd72f1d790b3"}, + {file = "wrapt-1.17.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:99039fa9e6306880572915728d7f6c24a86ec57b0a83f6b2491e1d8ab0235b9a"}, 
+ {file = "wrapt-1.17.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2696993ee1eebd20b8e4ee4356483c4cb696066ddc24bd70bcbb80fa56ff9061"}, + {file = "wrapt-1.17.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:612dff5db80beef9e649c6d803a8d50c409082f1fedc9dbcdfde2983b2025b82"}, + {file = "wrapt-1.17.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:62c2caa1585c82b3f7a7ab56afef7b3602021d6da34fbc1cf234ff139fed3cd9"}, + {file = "wrapt-1.17.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c958bcfd59bacc2d0249dcfe575e71da54f9dcf4a8bdf89c4cb9a68a1170d73f"}, + {file = "wrapt-1.17.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc78a84e2dfbc27afe4b2bd7c80c8db9bca75cc5b85df52bfe634596a1da846b"}, + {file = "wrapt-1.17.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:ba0f0eb61ef00ea10e00eb53a9129501f52385c44853dbd6c4ad3f403603083f"}, + {file = "wrapt-1.17.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:1e1fe0e6ab7775fd842bc39e86f6dcfc4507ab0ffe206093e76d61cde37225c8"}, + {file = "wrapt-1.17.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:c86563182421896d73858e08e1db93afdd2b947a70064b813d515d66549e15f9"}, + {file = "wrapt-1.17.2-cp39-cp39-win32.whl", hash = "sha256:f393cda562f79828f38a819f4788641ac7c4085f30f1ce1a68672baa686482bb"}, + {file = "wrapt-1.17.2-cp39-cp39-win_amd64.whl", hash = "sha256:36ccae62f64235cf8ddb682073a60519426fdd4725524ae38874adf72b5f2aeb"}, + {file = "wrapt-1.17.2-py3-none-any.whl", hash = "sha256:b18f2d1533a71f069c7f82d524a52599053d4c7166e9dd374ae2136b7f40f7c8"}, + {file = "wrapt-1.17.2.tar.gz", hash = "sha256:41388e9d4d1522446fe79d3213196bd9e3b301a336965b9e27ca2788ebd122f3"}, ] [[package]] @@ -5021,4 +4907,4 @@ propcache = ">=0.2.0" [metadata] lock-version = "2.0" python-versions = "3.11.2" -content-hash = "9e92c532af81fecec4b0855a320481a779b88c8e456ec7d745c47d4be4790fbb" +content-hash = "3df876e9fd67ebb34d94f6b4e566156e5f5f2adc75866ad06a65beddfce78207" diff --git a/pyproject.toml b/pyproject.toml index 86f87c393..c71e785f9 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -21,19 +21,15 @@ django-import-export = "^4.3.4" django-annoying = "^0.10.7" dj-rest-auth = "^5.0.2" tweepy = "^4.15.0" -Unidecode = "^1.3.8" user-agents = "^2.2.0" whitenoise = "^6.8.2" django-debug-toolbar = "^4.4.6" selenium = "^4.28.1" pylibmc = "^1.6.1" psycopg2-binary = "^2.9.10" -boto = "^2.49.0" -django-cors-headers = "^4.6.0" protobuf = "^4.25.3" django-storages = { extras = ["google"], version = "^1.14.3" } django-timedeltafield = "^0.7.10" -EasyProcess = "^1.1" "giturlparse.py" = "^0.0.5" gunicorn = "^23.0.0" oauthlib = "^3.2.0" @@ -49,8 +45,6 @@ djangorestframework = "^3.15.2" cffi = "^1.17.1" django-mdeditor = "^0.1.20" django-tz-detect = "^0.4.0" -django-tellme = "^0.7.3" -django-bootstrap-datepicker-plus = "^5.0.5" django-star-ratings = "^0.9.2" stripe = "^8.4.0" django-environ = "^0.12.0" @@ -72,14 +66,12 @@ psutil = "^5.9.8" python-bitcoinrpc = "^1.0" sendgrid = "^6.11.0" matplotlib = "^3.10.0" -openpyxl = "^3.1.5" atproto = "^0.0.55" slack-bolt = "^1.22.0" django-redis = "^5.4.0" uvicorn = "^0.34.0" channels = "^4.2.0" channels-redis = "^4.2.1" -async-timeout = "^5.0.1" aiohttp = "^3.11.11" scout-apm = "^3.3.0" newrelic = "^10.4.0" diff --git a/website/admin.py b/website/admin.py index 544f0963a..9d689728e 100644 --- a/website/admin.py +++ b/website/admin.py @@ -37,6 +37,8 @@ SuggestionVotes, Tag, TimeLog, + Trademark, 
+ TrademarkOwner, Transaction, UserProfile, Wallet, @@ -494,3 +496,5 @@ class PostAdmin(admin.ModelAdmin): admin.site.register(Activity) admin.site.register(PRAnalysisReport) admin.site.register(Post, PostAdmin) +admin.site.register(Trademark) +admin.site.register(TrademarkOwner) diff --git a/website/api/views.py b/website/api/views.py index 48feb1c5b..f090191d8 100644 --- a/website/api/views.py +++ b/website/api/views.py @@ -166,15 +166,11 @@ def get_issue_info(self, request, issue): if issue.screenshot: # If an image exists in the Issue table, return it along with additional images from IssueScreenshot screenshots = [request.build_absolute_uri(issue.screenshot.url)] + [ - request.build_absolute_uri(screenshot.image.url) - for screenshot in issue.screenshots.all() + request.build_absolute_uri(screenshot.image.url) for screenshot in issue.screenshots.all() ] else: # If no image exists in the Issue table, return only the images from IssueScreenshot - screenshots = [ - request.build_absolute_uri(screenshot.image.url) - for screenshot in issue.screenshots.all() - ] + screenshots = [request.build_absolute_uri(screenshot.image.url) for screenshot in issue.screenshots.all()] is_upvoted = False is_flagged = False @@ -232,9 +228,7 @@ def create(self, request, *args, **kwargs): screenshot_count = len(self.request.FILES.getlist("screenshots")) if screenshot_count == 0: - return Response( - {"error": "Upload at least one image!"}, status=status.HTTP_400_BAD_REQUEST - ) + return Response({"error": "Upload at least one image!"}, status=status.HTTP_400_BAD_REQUEST) elif screenshot_count > 5: return Response({"error": "Max limit of 5 images!"}, status=status.HTTP_400_BAD_REQUEST) @@ -247,9 +241,7 @@ def create(self, request, *args, **kwargs): for screenshot in self.request.FILES.getlist("screenshots"): if image_validator(screenshot): filename = screenshot.name - screenshot.name = ( - f"{filename[:10]}{str(uuid.uuid4())[:40]}.{filename.split('.')[-1]}" - ) + screenshot.name = f"{filename[:10]}{str(uuid.uuid4())[:40]}.{filename.split('.')[-1]}" file_path = default_storage.save(f"screenshots/{screenshot.name}", screenshot) # Create the IssueScreenshot object and associate it with the issue @@ -387,19 +379,11 @@ def filter(self, request, *args, **kwargs): temp["rank"] = rank_user temp["id"] = each["id"] temp["User"] = each["username"] - temp["score"] = Points.objects.filter(user=each["id"]).aggregate( - total_score=Sum("score") - ) - temp["image"] = list(UserProfile.objects.filter(user=each["id"]).values("user_avatar"))[ - 0 - ] - temp["title_type"] = list(UserProfile.objects.filter(user=each["id"]).values("title"))[ - 0 - ] + temp["score"] = Points.objects.filter(user=each["id"]).aggregate(total_score=Sum("score")) + temp["image"] = list(UserProfile.objects.filter(user=each["id"]).values("user_avatar"))[0] + temp["title_type"] = list(UserProfile.objects.filter(user=each["id"]).values("title"))[0] temp["follows"] = list(UserProfile.objects.filter(user=each["id"]).values("follows"))[0] - temp["savedissue"] = list( - UserProfile.objects.filter(user=each["id"]).values("issue_saved") - )[0] + temp["savedissue"] = list(UserProfile.objects.filter(user=each["id"]).values("issue_saved"))[0] rank_user = rank_user + 1 users.append(temp) @@ -470,9 +454,7 @@ def get(self, request, format=None, *args, **kwargs): def organization_leaderboard(self, request, *args, **kwargs): paginator = PageNumberPagination() organizations = ( - Organization.objects.values() - .annotate(issue_count=Count("domain__issue")) - 
.order_by("-issue_count") + Organization.objects.values().annotate(issue_count=Count("domain__issue")).order_by("-issue_count") ) page = paginator.paginate_queryset(organizations, request) @@ -486,9 +468,7 @@ def get(self, request, *args, **kwargs): hunt_count = Hunt.objects.all().count() domain_count = Domain.objects.all().count() - return Response( - {"bugs": bug_count, "users": user_count, "hunts": hunt_count, "domains": domain_count} - ) + return Response({"bugs": bug_count, "users": user_count, "hunts": hunt_count, "domains": domain_count}) class UrlCheckApiViewset(APIView): @@ -503,9 +483,7 @@ def post(self, request, *args, **kwargs): domain = domain_url.replace("https://", "").replace("http://", "").replace("www.", "") issues = ( - Issue.objects.filter( - Q(Q(domain__name=domain) | Q(domain__url__icontains=domain)) & Q(is_hidden=False) - ) + Issue.objects.filter(Q(Q(domain__name=domain) | Q(domain__url__icontains=domain)) & Q(is_hidden=False)) .values( "id", "description", @@ -533,27 +511,17 @@ def get_active_hunts(self, request, fields, *args, **kwargs): return Response(hunts) def get_previous_hunts(self, request, fields, *args, **kwargs): - hunts = ( - Hunt.objects.values(*fields) - .filter(is_published=True, end_on__lte=datetime.now()) - .order_by("-end_on") - ) + hunts = Hunt.objects.values(*fields).filter(is_published=True, end_on__lte=datetime.now()).order_by("-end_on") return Response(hunts) def get_upcoming_hunts(self, request, fields, *args, **kwargs): hunts = ( - Hunt.objects.values(*fields) - .filter(is_published=True, starts_on__gte=datetime.now()) - .order_by("starts_on") + Hunt.objects.values(*fields).filter(is_published=True, starts_on__gte=datetime.now()).order_by("starts_on") ) return Response(hunts) def get_search_by_name(self, request, search_query, fields, *args, **kwargs): - hunts = ( - Hunt.objects.values(*fields) - .filter(is_published=True, name__icontains=search_query) - .order_by("end_on") - ) + hunts = Hunt.objects.values(*fields).filter(is_published=True, name__icontains=search_query).order_by("end_on") return Response(hunts) def get(self, request, *args, **kwargs): @@ -608,15 +576,11 @@ def get_active_hunts(self, request, *args, **kwargs): return Response(self.serialize_hunts(hunts)) def get_previous_hunts(self, request, *args, **kwargs): - hunts = Hunt.objects.filter(is_published=True, end_on__lte=datetime.now()).order_by( - "-end_on" - ) + hunts = Hunt.objects.filter(is_published=True, end_on__lte=datetime.now()).order_by("-end_on") return Response(self.serialize_hunts(hunts)) def get_upcoming_hunts(self, request, *args, **kwargs): - hunts = Hunt.objects.filter(is_published=True, starts_on__gte=datetime.now()).order_by( - "starts_on" - ) + hunts = Hunt.objects.filter(is_published=True, starts_on__gte=datetime.now()).order_by("starts_on") return Response(self.serialize_hunts(hunts)) def get(self, request, *args, **kwargs): @@ -626,23 +590,17 @@ def get(self, request, *args, **kwargs): previousHunt = request.query_params.get("previousHunt") upcomingHunt = request.query_params.get("upcomingHunt") if activeHunt: - page = paginator.paginate_queryset( - self.get_active_hunts(request, *args, **kwargs), request - ) + page = paginator.paginate_queryset(self.get_active_hunts(request, *args, **kwargs), request) return paginator.get_paginated_response(page) elif previousHunt: - page = paginator.paginate_queryset( - self.get_previous_hunts(request, *args, **kwargs), request - ) + page = paginator.paginate_queryset(self.get_previous_hunts(request, *args, **kwargs), 
request) return paginator.get_paginated_response(page) elif upcomingHunt: - page = paginator.paginate_queryset( - self.get_upcoming_hunts(request, *args, **kwargs), request - ) + page = paginator.paginate_queryset(self.get_upcoming_hunts(request, *args, **kwargs), request) return paginator.get_paginated_response(page) @@ -668,9 +626,7 @@ def post(self, request, *args, **kwargs): try: current_site = get_current_site(request) referral_code, created = InviteFriend.objects.get_or_create(sender=request.user) - referral_link = ( - f"https://{current_site.domain}/referral/?ref={referral_code.referral_code}" - ) + referral_link = f"https://{current_site.domain}/referral/?ref={referral_code.referral_code}" # Prepare email content subject = f"Join me on {current_site.name}!" @@ -703,9 +659,7 @@ def post(self, request, *args, **kwargs): } ) else: - return Response( - {"error": "Email failed to send", "email_status": "failed"}, status=500 - ) + return Response({"error": "Email failed to send", "email_status": "failed"}, status=500) except smtplib.SMTPException as e: return Response( diff --git a/website/apps.py b/website/apps.py index 864790176..14ee23a24 100644 --- a/website/apps.py +++ b/website/apps.py @@ -6,4 +6,5 @@ class WebsiteConfig(AppConfig): name = "website" def ready(self): - import website.signals # noqa + import website.challenge_signals # noqa + import website.feed_signals # noqa diff --git a/website/bitcoin_utils.py b/website/bitcoin_utils.py index 986a5c0e2..ef4cf6972 100644 --- a/website/bitcoin_utils.py +++ b/website/bitcoin_utils.py @@ -25,9 +25,7 @@ def create_bacon_token(user, contribution): contribution.txid = txid contribution.save() - token = BaconToken.objects.create( - user=user, amount=amount, contribution=contribution, token_id=txid - ) + token = BaconToken.objects.create(user=user, amount=amount, contribution=contribution, token_id=txid) return token except JSONRPCException as e: diff --git a/website/bot.py b/website/bot.py index 97ce6d7c8..3b3295dd8 100644 --- a/website/bot.py +++ b/website/bot.py @@ -133,9 +133,7 @@ def embed_documents_and_save(embed_docs): if file.is_file(): with open(file, "rb") as f: content = f.read() - default_storage.save( - str(db_folder_path / file.relative_to(temp_db_path)), ContentFile(content) - ) + default_storage.save(str(db_folder_path / file.relative_to(temp_db_path)), ContentFile(content)) log_chat(f"Uploaded file {file.name} to storage") except Exception as e: log_chat(f"Error during FAISS index embedding and saving: {e}") @@ -156,9 +154,7 @@ def load_vector_store(): check_db_folder_str = db_folder_str + "/index.faiss" if not default_storage.exists(check_db_folder_str): temp_dir.cleanup() - ChatBotLog.objects.create( - question="Folder does not exist", answer=f"Folder Str: {str(db_folder_str)}" - ) + ChatBotLog.objects.create(question="Folder does not exist", answer=f"Folder Str: {str(db_folder_str)}") return None # Download all files from the storage folder to the temp directory @@ -193,9 +189,7 @@ def conversation_chain(vector_store): ) ) llm = ChatOpenAI(model_name="gpt-3.5-turbo-0125", temperature=0.5) - retriever = vector_store.as_retriever( - search_type="similarity", search_kwargs={"k": retrieval_search_results} - ) + retriever = vector_store.as_retriever(search_type="similarity", search_kwargs={"k": retrieval_search_results}) memory = ConversationSummaryMemory( llm=llm, return_messages=True, diff --git a/website/challenge_signals.py b/website/challenge_signals.py new file mode 100644 index 000000000..e2374e0e9 --- /dev/null +++ 
b/website/challenge_signals.py
@@ -0,0 +1,199 @@
+from django.db.models.signals import post_save
+from django.dispatch import receiver
+from django.utils import timezone
+
+from .models import Challenge, IpReport, Issue, Points, TimeLog, UserProfile
+
+
+def update_challenge_progress(user, challenge_title, model_class, reason, threshold=None, team_threshold=None):
+    if not user.is_authenticated:
+        return
+    try:
+        challenge = Challenge.objects.get(title=challenge_title)
+
+        if challenge.challenge_type == "team":
+            # Get the user's team
+            user_profile = user.userprofile
+            if user_profile.team is None:
+                return
+
+            team = user_profile.team
+            if team not in challenge.team_participants.all():
+                challenge.team_participants.add(team)
+
+            total_actions = 0
+            for member in team.user_profiles.all():
+                total_actions += model_class.objects.filter(user=member.user).count()
+
+            # Calculate progress based on actions performed by the team
+            team_progress = min((total_actions / team_threshold) * 100, 100)
+
+            challenge.progress = int(team_progress)
+            challenge.save()
+
+            if team_progress == 100 and not challenge.completed:
+                challenge.completed = True  # Explicitly mark the challenge as completed
+                challenge.completed_at = timezone.now()  # Track completion time (optional)
+                challenge.save()  # Save changes to the challenge
+
+                team.team_points += challenge.points
+                team.save()
+        else:
+            if user not in challenge.participants.all():
+                challenge.participants.add(user)
+
+            user_count = model_class.objects.filter(user=user).count()
+            progress = min((user_count / threshold) * 100, 100)  # Ensure it doesn't exceed 100%
+
+            challenge.progress = int(progress)
+            challenge.save()
+            print(challenge.completed)
+            if challenge.progress == 100 and not challenge.completed:
+                challenge.completed = True  # Explicitly mark the challenge as completed
+                challenge.completed_at = timezone.now()
+                challenge.save()
+
+                # Award points to the user
+                Points.objects.create(user=user, score=challenge.points, reason=reason)
+
+    except Challenge.DoesNotExist:
+        pass
+
+
+@receiver(post_save)
+def handle_post_save(sender, instance, created, **kwargs):
+    """Generic handler for post_save signal."""
+    if sender == IpReport and created:  # Track IP report challenges
+        if instance.user and instance.user.is_authenticated:
+            update_challenge_progress(
+                user=instance.user,
+                challenge_title="Report 5 IPs",
+                model_class=IpReport,
+                reason="Completed 'Report 5 IPs' challenge",
+                threshold=5,
+            )
+            if instance.user.is_authenticated and instance.user.userprofile.team:
+                update_challenge_progress(
+                    user=instance.user,
+                    challenge_title="Report 10 IPs",
+                    model_class=IpReport,
+                    reason="Completed 'Report 10 IPs' challenge",
+                    team_threshold=10,  # For team challenge
+                )
+
+    elif sender == Issue and created:  # Track issue report challenges
+        if instance.user and instance.user.is_authenticated:
+            update_challenge_progress(
+                user=instance.user,
+                challenge_title="Report 5 Issues",
+                model_class=Issue,
+                reason="Completed 'Report 5 Issues' challenge",
+                threshold=5,
+            )
+            if instance.user.is_authenticated and instance.user.userprofile.team:
+                update_challenge_progress(
+                    user=instance.user,
+                    challenge_title="Report 10 Issues",
+                    model_class=Issue,
+                    reason="Completed 'Report 10 Issues' challenge",
+                    team_threshold=10,  # For team challenge
+                )
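Side note: both branches of update_challenge_progress reduce to the same clamp: progress is (count / threshold) * 100, capped at 100; the team branch just sums the counts across all members first. A quick standalone sanity check of that formula (plain Python with made-up numbers, not part of the patch):

def challenge_progress(action_count, threshold):
    # Same formula as update_challenge_progress: percent of the threshold, capped at 100.
    return int(min((action_count / threshold) * 100, 100))

print(challenge_progress(3, 5))    # 60  (a user has filed 3 of 5 reports)
print(challenge_progress(12, 10))  # 100 (a team overshot its threshold; still capped)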
+
+
+@receiver(post_save, sender=TimeLog)
+def update_user_streak(sender, instance, created, **kwargs):
+    if created and instance.user and instance.user.is_authenticated:
+        check_in_date = instance.start_time.date()  # Extract the date from TimeLog
+        user = instance.user
+
+        try:
+            user_profile = user.userprofile
+            user_profile.update_streak_and_award_points(check_in_date)
+
+            handle_sign_in_challenges(user, user_profile)
+
+            if user_profile.team:
+                handle_team_sign_in_challenges(user_profile.team)
+
+        except UserProfile.DoesNotExist:
+            pass
+
+
+def handle_sign_in_challenges(user, user_profile):
+    """
+    Update progress for single challenges based on the user's streak.
+    """
+    try:
+        print("Handling user sign-in challenge...")
+        challenge_title = "Sign in for 5 Days"
+        challenge = Challenge.objects.get(title=challenge_title, challenge_type="single")
+
+        if user not in challenge.participants.all():
+            challenge.participants.add(user)
+
+        streak_count = user_profile.current_streak
+        print(streak_count)
+
+        if streak_count >= 5:
+            progress = 100
+        else:
+            progress = streak_count * 100 / 5  # Calculate progress if streak is less than 5
+        print(progress)
+        # Update the challenge progress
+        challenge.progress = int(progress)
+        challenge.save()
+
+        # Award points if the challenge is completed (when streak is 5)
+        if progress == 100 and not challenge.completed:
+            challenge.completed = True
+            challenge.completed_at = timezone.now()
+            challenge.save()
+
+            Points.objects.create(
+                user=user,
+                score=challenge.points,
+                reason=f"Completed '{challenge_title}' challenge",
+            )
+
+    except Challenge.DoesNotExist:
+        # Handle case when the challenge does not exist
+        pass
+
+
+def handle_team_sign_in_challenges(team):
+    """
+    Update progress for team challenges where all members must sign in for 5 days consecutively.
+    """
+    try:
+        challenge_title = "All Members Sign in for 5 Days"  # Title of the team challenge
+        challenge = Challenge.objects.get(title=challenge_title, challenge_type="team")
+        print("Handling team sign-in challenge...")
+
+        # Ensure the team is registered as a participant
+        if team not in challenge.team_participants.all():
+            challenge.team_participants.add(team)
+
+        # Get streaks for all team members
+        streaks = [member.current_streak for member in team.user_profiles.all()]
+
+        if streaks:  # If the team has members
+            min_streak = min(streaks)
+            progress = min((min_streak / 5) * 100, 100)
+        else:
+            min_streak = 0
+            progress = 0
+
+        challenge.progress = int(progress)
+        challenge.save()
+
+        if progress == 100 and not challenge.completed:
+            challenge.completed = True
+            challenge.completed_at = timezone.now()
+            challenge.save()
+
+            # Add points to the team
+            team.team_points += challenge.points
+            team.save()
+    except Challenge.DoesNotExist:
+        print(f"Challenge '{challenge_title}' does not exist.")
+        pass
diff --git a/website/consumers.py b/website/consumers.py
index 517544290..3c2cfb2e6 100644
--- a/website/consumers.py
+++ b/website/consumers.py
@@ -52,9 +52,7 @@ async def receive(self, text_data):
         branch2 = data.get("branch2")  # Branch name for the second repository
 
         if not repo1 or not repo2 or not type1 or not type2:
-            await self.send(
-                json.dumps({"error": "Both repositories and their types are required."})
-            )
+            await self.send(json.dumps({"error": "Both repositories and their types are required."}))
             return
 
         if type1 not in ["github", "zip"] or type2 not in ["github", "zip"]:
@@ -152,9 +150,7 @@ async def download_and_extract_zip(self, zip_url, temp_dir, repo_name):
         async with aiohttp.ClientSession() as session:
             async with session.get(zip_url) as response:
                 if response.status != 200:
-                    raise Exception(
-                        f"Failed to download ZIP file. Status code: {response.status}"
-                    )
+                    raise Exception(f"Failed to download ZIP file.
Status code: {response.status}") # Extract the ZIP file zip_file_path = Path(temp_dir) / f"{repo_name}.zip" @@ -229,9 +225,7 @@ def process_similarity_analysis(self, repo1_path, repo2_path): if i % 5 == 0: # Ping the frontend every 5 iterations try: - asyncio.run( - self.send(json.dumps({"ping": "ping"})) - ) # Send ping from the worker thread + asyncio.run(self.send(json.dumps({"ping": "ping"}))) # Send ping from the worker thread except Exception as e: return None # Stop the analysis if the connection is lost i += 1 @@ -247,23 +241,14 @@ def process_similarity_analysis(self, repo1_path, repo2_path): for func1 in functions1: for func2 in functions2: name_similarity = ( - difflib.SequenceMatcher( - None, func1["signature"]["name"], func2["signature"]["name"] - ).ratio() - * 100 + difflib.SequenceMatcher(None, func1["signature"]["name"], func2["signature"]["name"]).ratio() * 100 ) # Signature similarity using difflib - signature1 = ( - f"{func1['signature']['name']}({', '.join(func1['signature']['args'])})" - ) - signature2 = ( - f"{func2['signature']['name']}({', '.join(func2['signature']['args'])})" - ) + signature1 = f"{func1['signature']['name']}({', '.join(func1['signature']['args'])})" + signature2 = f"{func2['signature']['name']}({', '.join(func2['signature']['args'])})" - signature_similarity = ( - difflib.SequenceMatcher(None, signature1, signature2).ratio() * 100 - ) + signature_similarity = difflib.SequenceMatcher(None, signature1, signature2).ratio() * 100 # Content similarity using OpenAI embeddings fulltext1 = func1["full_text"] @@ -302,9 +287,7 @@ def process_similarity_analysis(self, repo1_path, repo2_path): models2 = extract_django_models(repo2_path) for model1 in models1: for model2 in models2: - model_similarity = ( - difflib.SequenceMatcher(None, model1["name"], model2["name"]).ratio() * 100 - ) + model_similarity = difflib.SequenceMatcher(None, model1["name"], model2["name"]).ratio() * 100 model_fields_similarity = compare_model_fields(model1, model2) matching_details["models"].append( diff --git a/website/signals.py b/website/feed_signals.py similarity index 88% rename from website/signals.py rename to website/feed_signals.py index 62bc9b2d2..821d2f80e 100644 --- a/website/signals.py +++ b/website/feed_signals.py @@ -3,19 +3,7 @@ from django.db.models.signals import post_save, pre_delete from django.dispatch import receiver -from .models import ( - Activity, - Badge, - Bid, - Hunt, - IpReport, - Issue, - Post, - Suggestion, - TimeLog, - UserBadge, - UserProfile, -) +from .models import Activity, Badge, Bid, Hunt, IpReport, Issue, Post, Suggestion, TimeLog, UserBadge, UserProfile def get_default_user(): @@ -38,9 +26,7 @@ def create_activity(instance, action_type): """Generic function to create an activity for a given model instance.""" model_name = instance._meta.model_name user_field = ( - getattr(instance, "user", None) - or getattr(instance, "author", None) - or getattr(instance, "modified_by", None) + getattr(instance, "user", None) or getattr(instance, "author", None) or getattr(instance, "modified_by", None) ) user = user_field or get_default_user() @@ -111,7 +97,7 @@ def handle_post_save(sender, instance, created, **kwargs): @receiver(pre_delete) def handle_pre_delete(sender, instance, **kwargs): """Generic handler for pre_delete signal.""" - if sender in [Issue, Hunt, IpReport, Post]: # Add any model you want to track + if sender in [Issue, Hunt, IpReport, Post]: create_activity(instance, "deleted") @@ -120,15 +106,12 @@ def update_user_streak(sender, instance, 
created, **kwargs): """ Automatically update user's streak when a TimeLog is created """ - if created: - # Use the date of the start_time for streak tracking + if created and instance.user and instance.user.is_authenticated: check_in_date = instance.start_time.date() - # Get the user's profile and update streak try: user_profile = instance.user.userprofile user_profile.update_streak_and_award_points(check_in_date) except UserProfile.DoesNotExist: - # Fallback: create profile if it doesn't exist UserProfile.objects.create( user=instance.user, current_streak=1, longest_streak=1, last_check_in=check_in_date ) diff --git a/website/forms.py b/website/forms.py index ace00fd8f..292e13c6e 100644 --- a/website/forms.py +++ b/website/forms.py @@ -1,3 +1,4 @@ +from allauth.account.forms import SignupForm from captcha.fields import CaptchaField from django import forms from mdeditor.fields import MDTextFormField @@ -111,3 +112,13 @@ class GitHubURLForm(forms.Form): required=True, widget=forms.TextInput(attrs={"placeholder": "Add any Github URL"}), ) + + +class SignupFormWithCaptcha(SignupForm, CaptchaForm): + def clean(self): + cleaned_data = super().clean() + return cleaned_data + + def save(self, request): + user = super().save(request) + return user diff --git a/website/management/commands/check_keywords.py b/website/management/commands/check_keywords.py index 9bbf0050e..f60ad656c 100644 --- a/website/management/commands/check_keywords.py +++ b/website/management/commands/check_keywords.py @@ -35,17 +35,13 @@ def handle(self, *args, **options): monitor.last_checked_time = timezone.now() monitor.save() - self.stdout.write( - self.style.SUCCESS(f"Monitoring {monitor.url}: status {monitor.status}") - ) + self.stdout.write(self.style.SUCCESS(f"Monitoring {monitor.url}: status {monitor.status}")) except Exception as e: self.stderr.write(self.style.ERROR(f"Error monitoring {monitor.url}: {str(e)}")) def notify_user(self, username, website, email, status): subject = f"Website Status Update: {website} is {status}" - message = ( - f"Dear {username},\n\nThe website '{website}' you are monitoring is currently {status}." - ) + message = f"Dear {username},\n\nThe website '{website}' you are monitoring is currently {status}." send_mail( subject, diff --git a/website/management/commands/check_owasp_projects.py b/website/management/commands/check_owasp_projects.py index 427bc7854..0e57bb271 100644 --- a/website/management/commands/check_owasp_projects.py +++ b/website/management/commands/check_owasp_projects.py @@ -23,16 +23,12 @@ def handle(self, *args, **options): slack_webhook_url = os.environ.get("SLACK_WEBHOOK_URL") if not github_token: - self.stderr.write( - self.style.ERROR("GITHUB_TOKEN is not configured in settings. Aborting.") - ) + self.stderr.write(self.style.ERROR("GITHUB_TOKEN is not configured in settings. Aborting.")) return if not slack_webhook_url: self.stderr.write( - self.style.WARNING( - "SLACK_WEBHOOK_URL not found in environment. Slack notifications will be disabled." - ) + self.style.WARNING("SLACK_WEBHOOK_URL not found in environment. 
Slack notifications will be disabled.") ) headers = { @@ -45,9 +41,7 @@ def handle(self, *args, **options): max_rate_limit_retries = 5 # Get or create OWASP organization - org, created = Organization.objects.get_or_create( - name__iexact="OWASP", defaults={"name": "OWASP"} - ) + org, created = Organization.objects.get_or_create(name__iexact="OWASP", defaults={"name": "OWASP"}) # Fetch all www-project repos www_project_repos = self.fetch_all_repos(headers) @@ -66,9 +60,7 @@ def handle(self, *args, **options): # Skip if repo already exists by checking Repo table if Repo.objects.filter(repo_url=repo_url).exists(): - self.stdout.write( - self.style.WARNING(f"Repository {repo_url} already exists. Skipping...") - ) + self.stdout.write(self.style.WARNING(f"Repository {repo_url} already exists. Skipping...")) continue self.stdout.write(f"Processing repository {repo_url}") @@ -93,8 +85,7 @@ def handle(self, *args, **options): "name": project_name, "description": repo_data.get("description") or "No description available", # Set default description - "url": base_url - or repo_data.get("html_url", ""), # Fallback to repo URL if no homepage + "url": base_url or repo_data.get("html_url", ""), # Fallback to repo URL if no homepage "organization": org, } @@ -132,9 +123,7 @@ def handle(self, *args, **options): watchers=repo_info.get("watchers", 0), primary_language=repo_info.get("primary_language", ""), license=repo_info.get("license", ""), - last_commit_date=self.parse_date_safely( - repo_info.get("last_commit_date") - ), + last_commit_date=self.parse_date_safely(repo_info.get("last_commit_date")), created=self.parse_date_safely(repo_info.get("created")), modified=self.parse_date_safely(repo_info.get("modified")), network_count=repo_info.get("network_count", 0), @@ -148,9 +137,7 @@ def handle(self, *args, **options): contributor_count=repo_info.get("contributor_count", 0), commit_count=repo_info.get("commit_count", 0), release_name=repo_info.get("release_name", ""), - release_datetime=self.parse_date_safely( - repo_info.get("release_datetime") - ), + release_datetime=self.parse_date_safely(repo_info.get("release_datetime")), ) # Add to new projects list for Slack notification @@ -175,29 +162,19 @@ def handle(self, *args, **options): if repo_info.get("logo_url"): self.fetch_and_save_logo(project, repo_info["logo_url"], headers) - self.stdout.write( - self.style.SUCCESS(f"Successfully created project and repo: {project_name}") - ) + self.stdout.write(self.style.SUCCESS(f"Successfully created project and repo: {project_name}")) except ValueError as ve: - self.stderr.write( - self.style.ERROR(f"Validation error for {repo_url}: {str(ve)}") - ) + self.stderr.write(self.style.ERROR(f"Validation error for {repo_url}: {str(ve)}")) continue except IntegrityError as ie: - self.stderr.write( - self.style.ERROR(f"Database integrity error for {repo_url}: {str(ie)}") - ) + self.stderr.write(self.style.ERROR(f"Database integrity error for {repo_url}: {str(ie)}")) continue except requests.RequestException as re: - self.stderr.write( - self.style.ERROR(f"Network error while processing {repo_url}: {str(re)}") - ) + self.stderr.write(self.style.ERROR(f"Network error while processing {repo_url}: {str(re)}")) continue except CommandError as ce: - self.stderr.write( - self.style.ERROR(f"Command execution error for {repo_url}: {str(ce)}") - ) + self.stderr.write(self.style.ERROR(f"Command execution error for {repo_url}: {str(ce)}")) continue # Send Slack notification if new projects were found and Slack webhook is configured @@ 
-217,9 +194,7 @@ def fetch_all_repos(self, headers, per_page=100): try: response = requests.get(url, headers=headers, timeout=10) if response.status_code != 200: - self.stderr.write( - self.style.ERROR(f"Error: API returned status {response.status_code}") - ) + self.stderr.write(self.style.ERROR(f"Error: API returned status {response.status_code}")) break data = response.json() @@ -240,9 +215,7 @@ def fetch_all_repos(self, headers, per_page=100): return all_repos - def fetch_github_repo_data( - self, repo_url, headers, delay, max_retries, is_wiki=False, is_main=False - ): + def fetch_github_repo_data(self, repo_url, headers, delay, max_retries, is_wiki=False, is_main=False): match = re.match(r"https://github.com/([^/]+/[^/]+)", repo_url) if not match: return None @@ -252,26 +225,18 @@ def api_get(url): try: response = requests.get(url, headers=headers, timeout=10) if response.status_code in (403, 429): # Rate limit or forbidden - self.stderr.write( - self.style.WARNING( - f"Rate limit hit for {url}. Attempt {i+1}/{max_retries}" - ) - ) + self.stderr.write(self.style.WARNING(f"Rate limit hit for {url}. Attempt {i+1}/{max_retries}")) time.sleep(delay) continue return response except requests.exceptions.RequestException as e: self.stderr.write( - self.style.WARNING( - f"Request failed for {url}: {str(e)}. Attempt {i+1}/{max_retries}" - ) + self.style.WARNING(f"Request failed for {url}: {str(e)}. Attempt {i+1}/{max_retries}") ) time.sleep(delay) continue # After max retries, return None instead of raising exception - self.stderr.write( - self.style.WARNING(f"Failed to fetch {url} after {max_retries} attempts") - ) + self.stderr.write(self.style.WARNING(f"Failed to fetch {url} after {max_retries} attempts")) return None # Main repo data @@ -325,9 +290,7 @@ def parse_date_safely(date_string): "last_updated": last_updated, "watchers": repo_data.get("watchers_count", 0), "primary_language": repo_data.get("language", ""), - "license": ( - repo_data.get("license", {}).get("name") if repo_data.get("license") else None - ), + "license": (repo_data.get("license", {}).get("name") if repo_data.get("license") else None), "last_commit_date": last_commit_date, "created": created_date, "modified": modified_date, @@ -403,19 +366,13 @@ def api_get(url): try: response = requests.get(url, headers=headers, timeout=10) if response.status_code in (403, 429): - self.stderr.write( - self.style.WARNING( - f"Rate limit hit for {url}. Attempt {i+1}/{max_retries}" - ) - ) + self.stderr.write(self.style.WARNING(f"Rate limit hit for {url}. Attempt {i+1}/{max_retries}")) time.sleep(delay) continue return response except requests.exceptions.RequestException as e: self.stderr.write( - self.style.WARNING( - f"Request failed for {url}: {str(e)}. Attempt {i+1}/{max_retries}" - ) + self.style.WARNING(f"Request failed for {url}: {str(e)}. 
Attempt {i+1}/{max_retries}") ) time.sleep(delay) continue @@ -495,28 +452,18 @@ def handle_contributors(self, repo_instance, contributors_data): if not created: contributor_obj.name = contributor.get("login", contributor_obj.name) contributor_obj.github_url = contributor.get("html_url", contributor_obj.github_url) - contributor_obj.avatar_url = contributor.get( - "avatar_url", contributor_obj.avatar_url - ) - contributor_obj.contributor_type = contributor.get( - "type", contributor_obj.contributor_type - ) - contributor_obj.contributions = contributor.get( - "contributions", contributor_obj.contributions - ) + contributor_obj.avatar_url = contributor.get("avatar_url", contributor_obj.avatar_url) + contributor_obj.contributor_type = contributor.get("type", contributor_obj.contributor_type) + contributor_obj.contributions = contributor.get("contributions", contributor_obj.contributions) contributor_obj.save() contributor_instances.append(contributor_obj) - self.stdout.write( - self.style.SUCCESS(f" -> Added/Updated Contributor: {contributor_obj.name}") - ) + self.stdout.write(self.style.SUCCESS(f" -> Added/Updated Contributor: {contributor_obj.name}")) # Assign all contributors to the repo repo_instance.contributor.add(*contributor_instances) self.stdout.write( - self.style.SUCCESS( - f"Added {len(contributor_instances)} contributors to {repo_instance.name}" - ) + self.style.SUCCESS(f"Added {len(contributor_instances)} contributors to {repo_instance.name}") ) def send_slack_notification(self, new_projects, webhook_url): @@ -557,14 +504,10 @@ def send_slack_notification(self, new_projects, webhook_url): self.stdout.write(self.style.SUCCESS("Successfully sent Slack notification")) else: self.stderr.write( - self.style.WARNING( - f"Failed to send Slack notification. Status code: {response.status_code}" - ) + self.style.WARNING(f"Failed to send Slack notification. 
Status code: {response.status_code}") ) except requests.RequestException as re: - self.stderr.write( - self.style.ERROR(f"Network error sending Slack notification: {str(re)}") - ) + self.stderr.write(self.style.ERROR(f"Network error sending Slack notification: {str(re)}")) except ValueError as ve: self.stderr.write(self.style.ERROR(f"Invalid message format for Slack: {str(ve)}")) diff --git a/website/management/commands/check_trademarks.py b/website/management/commands/check_trademarks.py index b088059d2..c6fcfb937 100644 --- a/website/management/commands/check_trademarks.py +++ b/website/management/commands/check_trademarks.py @@ -86,9 +86,7 @@ def initialize_trademark_data(self, organizations): f"The last trademark check date for {organization.name} is updated to {organization.trademark_check_date}" ) organization.save() - self.stdout.write( - f"Initialized data for {organization.name}: Count = {organization.trademark_count}" - ) + self.stdout.write(f"Initialized data for {organization.name}: Count = {organization.trademark_count}") else: self.stderr.write(f"Failed to fetch trademark data for {organization.name}.") @@ -111,9 +109,7 @@ def rate_limited_check(self): if response_data: new_trademark_count = response_data.get("count", 0) if new_trademark_count > organization.trademark_count: - self.stdout.write( - f"New trademarks found for {organization.name}: {new_trademark_count}" - ) + self.stdout.write(f"New trademarks found for {organization.name}: {new_trademark_count}") organization.trademark_count = new_trademark_count organization.trademark_check_date = now() organization.save() diff --git a/website/management/commands/fetch_contributor_stats.py b/website/management/commands/fetch_contributor_stats.py index 79568b756..b50a7ebbd 100644 --- a/website/management/commands/fetch_contributor_stats.py +++ b/website/management/commands/fetch_contributor_stats.py @@ -34,8 +34,7 @@ def handle(self, **options): data_types = ["pulls", "issuesopen", "issuesclosed", "commits", "comments"] with ThreadPoolExecutor(max_workers=5) as executor: futures = [ - executor.submit(self.fetch_and_update_data, data_type, headers, owner, repo) - for data_type in data_types + executor.submit(self.fetch_and_update_data, data_type, headers, owner, repo) for data_type in data_types ] for future in futures: future.result() # Wait for all tasks to complete @@ -57,9 +56,7 @@ def fetch_and_update_data(self, data_type, headers, owner, repo): while url: response = requests.get(url, headers=headers) if response.status_code != 200: - self.stdout.write( - self.style.ERROR(f"Error fetching {data_type}: {response.json()}") - ) + self.stdout.write(self.style.ERROR(f"Error fetching {data_type}: {response.json()}")) break data = response.json() if not data: @@ -128,9 +125,7 @@ def fetch_and_update_data(self, data_type, headers, owner, repo): contribution_type="issue_closed", github_id=str(item["id"]), github_url=item["html_url"], - created=datetime.strptime( - item.get("closed_at") or item["created_at"], "%Y-%m-%dT%H:%M:%SZ" - ), + created=datetime.strptime(item.get("closed_at") or item["created_at"], "%Y-%m-%dT%H:%M:%SZ"), status="closed", repository=project, ) @@ -147,9 +142,7 @@ def fetch_and_update_data(self, data_type, headers, owner, repo): contribution_type="commit", github_id=item["sha"], github_url=item["html_url"], - created=datetime.strptime( - item["commit"]["author"]["date"], "%Y-%m-%dT%H:%M:%SZ" - ), + created=datetime.strptime(item["commit"]["author"]["date"], "%Y-%m-%dT%H:%M:%SZ"), repository=project, ) ) diff 
--git a/website/management/commands/fetch_contributors.py b/website/management/commands/fetch_contributors.py index 8548af965..62b9c6f7f 100644 --- a/website/management/commands/fetch_contributors.py +++ b/website/management/commands/fetch_contributors.py @@ -85,6 +85,4 @@ def handle(self, *args, **kwargs): project.contributor_count = len(contributors) project.save() - self.stdout.write( - self.style.SUCCESS(f"Successfully fetched contributors for project {project.name}") - ) + self.stdout.write(self.style.SUCCESS(f"Successfully fetched contributors for project {project.name}")) diff --git a/website/management/commands/fetch_trademarks.py b/website/management/commands/fetch_trademarks.py new file mode 100644 index 000000000..5d5e6e16d --- /dev/null +++ b/website/management/commands/fetch_trademarks.py @@ -0,0 +1,109 @@ +import time + +import requests +from django.conf import settings +from django.core.management.base import BaseCommand +from django.utils import timezone + +from website.models import Organization, Trademark, TrademarkOwner + + +class Command(BaseCommand): + help = "Fetch trademark information for organizations and store it in the database" + + def handle(self, *args, **kwargs): + organizations = Organization.objects.all() + + for organization in organizations: + name = organization.name + retries = 3 # Number of retries + while retries > 0: + try: + # Logging start of data fetching + self.stdout.write(self.style.NOTICE(f"Starting data fetch for organization: {name}")) + + # Fetch trademark data + url = "https://uspto-trademark.p.rapidapi.com/v1/batchTrademarkSearch/" + initial_payload = { + "keywords": f' ["{name}"]', + "start_index": "0", + } + headers = { + "x-rapidapi-key": f"{settings.USPTO_API}", + "x-rapidapi-host": "uspto-trademark.p.rapidapi.com", + "Content-Type": "application/x-www-form-urlencoded", + } + response = requests.post(url, data=initial_payload, headers=headers) + response.raise_for_status() + response_json = response.json() + + # The initial call returns a scroll_id, which is then used to obtain pagination results + scroll_id = response_json.get("scroll_id") + pagination_payload = { + "keywords": f' ["{name}"]', + "start_index": "0", + "scroll_id": scroll_id, + } + response = requests.post(url, data=pagination_payload, headers=headers) + response.raise_for_status() + results = response.json().get("results") + + # Store trademark data in the database + if results: + for item in results: + trademark, created = Trademark.objects.update_or_create( + keyword=item["keyword"], + registration_number=item.get("registration_number"), + serial_number=item.get("serial_number"), + status_label=item.get("status_label"), + status_code=item.get("status_code"), + status_date=item.get("status_date"), + status_definition=item.get("status_definition"), + filing_date=item.get("filing_date"), + registration_date=item.get("registration_date"), + abandonment_date=item.get("abandonment_date"), + expiration_date=item.get("expiration_date"), + description=item.get("description"), + organization=organization, + ) + + # Update or create owners + if item.get("owners"): + for owner_data in item["owners"]: + owner, owner_created = TrademarkOwner.objects.update_or_create( + name=owner_data.get("name"), + address1=owner_data.get("address1"), + address2=owner_data.get("address2"), + city=owner_data.get("city"), + state=owner_data.get("state"), + country=owner_data.get("country"), + postcode=owner_data.get("postcode"), + owner_type=owner_data.get("owner_type"), + 
owner_label=owner_data.get("owner_label"), + legal_entity_type=owner_data.get("legal_entity_type"), + legal_entity_type_label=owner_data.get("legal_entity_type_label"), + ) + trademark.owners.add(owner) + + organization.trademark_check_date = timezone.now() + organization.trademark_count = results and len(results) or 0 + organization.save() + + self.stdout.write(self.style.SUCCESS(f"Successfully stored data for organization: {name}")) + + # Introduced delay between requests to avoid rate limiting + time.sleep(2) + + break + except requests.exceptions.RequestException as e: + retries -= 1 + if retries == 0: + self.stdout.write(self.style.ERROR(f"Failed to fetch data for {name}: {e}")) + else: + # Retry after a delay if rate limited + self.stdout.write( + self.style.WARNING(f"Retrying for {name} due to {e}. Retries left: {retries}") + ) + time.sleep(5) + + self.stdout.write(self.style.SUCCESS("Successfully fetched and stored trademark data for all organizations")) diff --git a/website/management/commands/initsuperuser.py b/website/management/commands/initsuperuser.py index 8e9b70275..43160ea53 100644 --- a/website/management/commands/initsuperuser.py +++ b/website/management/commands/initsuperuser.py @@ -10,7 +10,5 @@ def handle(self, *args, **options): EMAIL = user[1] PASSWORD = user[2] print("Creating superuser for %s (%s)" % (USERNAME, EMAIL)) - superuser = User.objects.create_superuser( - username=USERNAME, email=EMAIL, password=PASSWORD - ) + superuser = User.objects.create_superuser(username=USERNAME, email=EMAIL, password=PASSWORD) superuser.save() diff --git a/website/management/commands/owasp_project_upload.py b/website/management/commands/owasp_project_upload.py index ee11d3414..a56da0269 100644 --- a/website/management/commands/owasp_project_upload.py +++ b/website/management/commands/owasp_project_upload.py @@ -44,9 +44,7 @@ def handle(self, *args, **options): # Check if GITHUB_TOKEN is set github_token = getattr(settings, "GITHUB_TOKEN", None) if not github_token: - self.stderr.write( - self.style.ERROR("GITHUB_TOKEN is not configured in settings. Aborting.") - ) + self.stderr.write(self.style.ERROR("GITHUB_TOKEN is not configured in settings. Aborting.")) return headers = { @@ -55,20 +53,14 @@ def handle(self, *args, **options): } # Get or create OWASP organization - org, created = Organization.objects.get_or_create( - name__iexact="OWASP", defaults={"name": "OWASP"} - ) + org, created = Organization.objects.get_or_create(name__iexact="OWASP", defaults={"name": "OWASP"}) if created: self.stdout.write(self.style.SUCCESS(f"Created Organization: {org.name}")) else: self.stdout.write(self.style.SUCCESS(f"Found Organization: {org.name}")) # Prompt user for confirmation - confirm = ( - input(f"Do you want to add projects to the organization '{org.name}'? (yes/no): ") - .strip() - .lower() - ) + confirm = input(f"Do you want to add projects to the organization '{org.name}'? (yes/no): ").strip().lower() if confirm not in ["yes", "y"]: self.stdout.write(self.style.WARNING("Operation cancelled by the user.")) return @@ -100,9 +92,7 @@ def handle(self, *args, **options): self.stdout.write(self.style.WARNING("CSV file is empty. 
No projects to add.")) return - self.stdout.write( - self.style.NOTICE(f"Processing {len(rows)} projects from the CSV file...") - ) + self.stdout.write(self.style.NOTICE(f"Processing {len(rows)} projects from the CSV file...")) project_count = 0 @@ -127,11 +117,7 @@ def clean_github_url(url): repo_field = row.get("Repo", "").strip() website_url = row.get("Website URL", "").strip() code_urls_csv = row.get("Code URL", "").strip() - code_urls = [ - clean_github_url(url.strip()) - for url in re.split(r"[,\n]+", code_urls_csv) - if url.strip() - ] + code_urls = [clean_github_url(url.strip()) for url in re.split(r"[,\n]+", code_urls_csv) if url.strip()] # Filter out any empty strings after cleaning code_urls = [url for url in code_urls if url] # Remove duplicates that might occur after cleaning URLs @@ -148,13 +134,9 @@ def clean_github_url(url): repo_url = f"https://github.com/OWASP/{repo_field}" # Validate GitHub repo existence - if not self.validate_github_repo( - repo_url, headers, delay_on_rate_limit, max_rate_limit_retries - ): + if not self.validate_github_repo(repo_url, headers, delay_on_rate_limit, max_rate_limit_retries): self.stderr.write( - self.style.WARNING( - f"Invalid or inaccessible Repo URL: {repo_url}. Skipping row {row_index}." - ) + self.style.WARNING(f"Invalid or inaccessible Repo URL: {repo_url}. Skipping row {row_index}.") ) continue @@ -169,9 +151,7 @@ def clean_github_url(url): ) if not repo_info: self.stderr.write( - self.style.WARNING( - f"Failed to fetch complete data for {repo_url}. Skipping row {row_index}." - ) + self.style.WARNING(f"Failed to fetch complete data for {repo_url}. Skipping row {row_index}.") ) continue @@ -212,9 +192,7 @@ def clean_github_url(url): with transaction.atomic(): # Check if project already exists by URL or slug if Project.objects.filter( - models.Q(url=website_url) - | models.Q(url=repo_info.get("html_url")) - | models.Q(slug=project_slug) + models.Q(url=website_url) | models.Q(url=repo_info.get("html_url")) | models.Q(slug=project_slug) ).exists(): self.stdout.write( self.style.WARNING( @@ -235,36 +213,22 @@ def clean_github_url(url): organization=org, ) except IntegrityError: - self.stdout.write( - self.style.WARNING( - "Failed to create project due to duplicate data. Skipping..." - ) - ) + self.stdout.write(self.style.WARNING("Failed to create project due to duplicate data. Skipping...")) continue # Fetch and save the logo project_logo_url = repo_info.get("logo_url", "") if self.fetch_and_save_logo(project, project_logo_url, headers): - self.stdout.write( - self.style.SUCCESS( - f"Successfully fetched and saved logo for {project.name}" - ) - ) + self.stdout.write(self.style.SUCCESS(f"Successfully fetched and saved logo for {project.name}")) else: self.stdout.write(self.style.WARNING(f"No logo found for {project.name}")) - self.stdout.write( - self.style.SUCCESS(f"Updated project: {project.name} ({repo_url})") - ) + self.stdout.write(self.style.SUCCESS(f"Updated project: {project.name} ({repo_url})")) # Handle wiki repo try: repo = Repo.objects.get(repo_url=repo_url) - self.stdout.write( - self.style.WARNING( - f"Wiki repo {repo_url} already exists. Skipping creation..." - ) - ) + self.stdout.write(self.style.WARNING(f"Wiki repo {repo_url} already exists. 
Skipping creation...")) except Repo.DoesNotExist: try: repo = Repo.objects.create( @@ -279,9 +243,7 @@ def clean_github_url(url): stars=repo_info.get("stars", 0), forks=repo_info.get("forks", 0), last_updated=( - parse_datetime(repo_info.get("last_updated")) - if repo_info.get("last_updated") - else None + parse_datetime(repo_info.get("last_updated")) if repo_info.get("last_updated") else None ), watchers=repo_info.get("watchers", 0), primary_language=repo_info.get("primary_language", ""), @@ -310,28 +272,20 @@ def clean_github_url(url): ) except IntegrityError: self.stdout.write( - self.style.WARNING( - "Failed to create wiki repo due to duplicate data. Skipping..." - ) + self.style.WARNING("Failed to create wiki repo due to duplicate data. Skipping...") ) continue # Handle additional repos for idx, code_url in enumerate(code_urls, start=1): if not code_url.startswith("https://github.com/"): - self.stderr.write( - self.style.WARNING(f"Invalid Code URL: {code_url}. Skipping.") - ) + self.stderr.write(self.style.WARNING(f"Invalid Code URL: {code_url}. Skipping.")) continue # Validate Code Repo URL - if not self.validate_github_repo( - code_url, headers, delay_on_rate_limit, max_rate_limit_retries - ): + if not self.validate_github_repo(code_url, headers, delay_on_rate_limit, max_rate_limit_retries): self.stderr.write( - self.style.WARNING( - f"Invalid or inaccessible Code Repo URL: {code_url}. Skipping." - ) + self.style.WARNING(f"Invalid or inaccessible Code Repo URL: {code_url}. Skipping.") ) continue @@ -347,18 +301,14 @@ def clean_github_url(url): if not code_repo_info: self.stderr.write( - self.style.WARNING( - f"Failed to fetch complete data for {code_url}. Skipping." - ) + self.style.WARNING(f"Failed to fetch complete data for {code_url}. Skipping.") ) continue try: code_repo = Repo.objects.get(repo_url=code_url) self.stdout.write( - self.style.WARNING( - f"Code repo {code_url} already exists. Skipping creation..." - ) + self.style.WARNING(f"Code repo {code_url} already exists. Skipping creation...") ) continue except Repo.DoesNotExist: @@ -424,9 +374,7 @@ def clean_github_url(url): ) except IntegrityError: self.stdout.write( - self.style.WARNING( - "Failed to create code repo due to duplicate data. Skipping..." - ) + self.style.WARNING("Failed to create code repo due to duplicate data. Skipping...") ) continue @@ -436,9 +384,7 @@ def clean_github_url(url): if not code_repo: self.stderr.write( - self.style.WARNING( - f"Failed to create/update Code Repo for {code_url}. Skipping." - ) + self.style.WARNING(f"Failed to create/update Code Repo for {code_url}. Skipping.") ) continue @@ -453,17 +399,11 @@ def clean_github_url(url): if code_contributors_data: self.handle_contributors(code_repo, code_contributors_data) - self.stdout.write( - self.style.SUCCESS( - f" -> Added/Updated Repo: {code_repo.name} ({code_url})" - ) - ) + self.stdout.write(self.style.SUCCESS(f" -> Added/Updated Repo: {code_repo.name} ({code_url})")) project_count += 1 - self.stdout.write( - self.style.SUCCESS(f"Import completed. Processed {project_count} projects.") - ) + self.stdout.write(self.style.SUCCESS(f"Import completed. Processed {project_count} projects.")) def validate_github_repo(self, repo_url, headers, delay, max_retries): """Check if a GitHub repository exists.""" @@ -477,18 +417,14 @@ def validate_github_repo(self, repo_url, headers, delay, max_retries): if response.status_code == 200: return True elif response.status_code in [403, 429]: - self.stderr.write( - self.style.WARNING(f"Rate limit reached. 
Waiting for {delay} seconds...") - ) + self.stderr.write(self.style.WARNING(f"Rate limit reached. Waiting for {delay} seconds...")) time.sleep(delay) continue elif response.status_code == 404: return False else: self.stderr.write( - self.style.WARNING( - f"Unexpected status code {response.status_code} for URL: {repo_url}" - ) + self.style.WARNING(f"Unexpected status code {response.status_code} for URL: {repo_url}") ) return False except requests.exceptions.RequestException as e: @@ -513,9 +449,7 @@ def convert_to_api_url(self, repo_url): self.stderr.write(self.style.WARNING(f"Invalid GitHub URL format: {repo_url}")) return None - def fetch_github_repo_data( - self, repo_url, headers, delay, max_retries, is_wiki=False, is_main=False - ): + def fetch_github_repo_data(self, repo_url, headers, delay, max_retries, is_wiki=False, is_main=False): match = re.match(r"https://github.com/([^/]+/[^/]+)", repo_url) if not match: return None @@ -525,26 +459,18 @@ def api_get(url): try: response = requests.get(url, headers=headers, timeout=10) if response.status_code in (403, 429): # Rate limit or forbidden - self.stderr.write( - self.style.WARNING( - f"Rate limit hit for {url}. Attempt {i+1}/{max_retries}" - ) - ) + self.stderr.write(self.style.WARNING(f"Rate limit hit for {url}. Attempt {i+1}/{max_retries}")) time.sleep(delay) continue return response except requests.exceptions.RequestException as e: self.stderr.write( - self.style.WARNING( - f"Request failed for {url}: {str(e)}. Attempt {i+1}/{max_retries}" - ) + self.style.WARNING(f"Request failed for {url}: {str(e)}. Attempt {i+1}/{max_retries}") ) time.sleep(delay) continue # After max retries, return None instead of raising exception - self.stderr.write( - self.style.WARNING(f"Failed to fetch {url} after {max_retries} attempts") - ) + self.stderr.write(self.style.WARNING(f"Failed to fetch {url} after {max_retries} attempts")) return None # Main repo data @@ -574,9 +500,7 @@ def api_get(url): "last_updated": repo_data.get("updated_at"), "watchers": repo_data.get("watchers_count", 0), "primary_language": repo_data.get("language", ""), - "license": ( - repo_data.get("license", {}).get("name") if repo_data.get("license") else None - ), + "license": (repo_data.get("license", {}).get("name") if repo_data.get("license") else None), "last_commit_date": repo_data.get("pushed_at"), "created": repo_data.get("created_at", ""), "modified": repo_data.get("updated_at", ""), @@ -652,19 +576,13 @@ def api_get(url): try: response = requests.get(url, headers=headers, timeout=10) if response.status_code in (403, 429): - self.stderr.write( - self.style.WARNING( - f"Rate limit hit for {url}. Attempt {i+1}/{max_retries}" - ) - ) + self.stderr.write(self.style.WARNING(f"Rate limit hit for {url}. Attempt {i+1}/{max_retries}")) time.sleep(delay) continue return response except requests.exceptions.RequestException as e: self.stderr.write( - self.style.WARNING( - f"Request failed for {url}: {str(e)}. Attempt {i+1}/{max_retries}" - ) + self.style.WARNING(f"Request failed for {url}: {str(e)}. 
Attempt {i+1}/{max_retries}") ) time.sleep(delay) continue @@ -741,26 +659,16 @@ def handle_contributors(self, repo_instance, contributors_data): if not created: contributor_obj.name = contributor.get("login", contributor_obj.name) contributor_obj.github_url = contributor.get("html_url", contributor_obj.github_url) - contributor_obj.avatar_url = contributor.get( - "avatar_url", contributor_obj.avatar_url - ) - contributor_obj.contributor_type = contributor.get( - "type", contributor_obj.contributor_type - ) - contributor_obj.contributions = contributor.get( - "contributions", contributor_obj.contributions - ) + contributor_obj.avatar_url = contributor.get("avatar_url", contributor_obj.avatar_url) + contributor_obj.contributor_type = contributor.get("type", contributor_obj.contributor_type) + contributor_obj.contributions = contributor.get("contributions", contributor_obj.contributions) contributor_obj.save() contributor_instances.append(contributor_obj) - self.stdout.write( - self.style.SUCCESS(f" -> Added/Updated Contributor: {contributor_obj.name}") - ) + self.stdout.write(self.style.SUCCESS(f" -> Added/Updated Contributor: {contributor_obj.name}")) # Assign all contributors to the repo repo_instance.contributor.add(*contributor_instances) self.stdout.write( - self.style.SUCCESS( - f"Added {len(contributor_instances)} contributors to {repo_instance.name}" - ) + self.style.SUCCESS(f"Added {len(contributor_instances)} contributors to {repo_instance.name}") ) diff --git a/website/management/commands/slack_daily_timelogs.py b/website/management/commands/slack_daily_timelogs.py index ad2ba3e32..73cc1e5f1 100644 --- a/website/management/commands/slack_daily_timelogs.py +++ b/website/management/commands/slack_daily_timelogs.py @@ -15,9 +15,7 @@ def handle(self, *args, **kwargs): current_hour_utc = datetime.utcnow().hour # Fetch all Slack integrations with related integration data - slack_integrations = SlackIntegration.objects.select_related( - "integration__organization" - ).all() + slack_integrations = SlackIntegration.objects.select_related("integration__organization").all() for integration in slack_integrations: current_org = integration.integration.organization @@ -25,8 +23,7 @@ def handle(self, *args, **kwargs): integration.default_channel_id and current_org and integration.daily_updates - and integration.daily_update_time - == current_hour_utc # Ensure it's the correct hour + and integration.daily_update_time == current_hour_utc # Ensure it's the correct hour ): print(f"Processing updates for organization: {current_org.name}") @@ -46,14 +43,9 @@ def handle(self, *args, **kwargs): for timelog in timelog_history: st = timelog.start_time et = timelog.end_time - issue_url = ( - timelog.github_issue_url if timelog.github_issue_url else "No issue URL" - ) + issue_url = timelog.github_issue_url if timelog.github_issue_url else "No issue URL" summary_message += ( - f"Task: {timelog}\n" - f"Start: {st}\n" - f"End: {et}\n" - f"Issue URL: {issue_url}\n\n" + f"Task: {timelog}\n" f"Start: {st}\n" f"End: {et}\n" f"Issue URL: {issue_url}\n\n" ) total_time += et - st diff --git a/website/management/commands/update_contributor_stats.py b/website/management/commands/update_contributor_stats.py index d8566fbc5..e208f3b02 100644 --- a/website/management/commands/update_contributor_stats.py +++ b/website/management/commands/update_contributor_stats.py @@ -63,9 +63,7 @@ def parse_github_url(self, url): def delete_existing_daily_stats(self, repo, current_month_start): """Delete existing daily stats for the 
current month""" with transaction.atomic(): - ContributorStats.objects.filter( - repo=repo, granularity="day", date__gte=current_month_start - ).delete() + ContributorStats.objects.filter(repo=repo, granularity="day", date__gte=current_month_start).delete() def fetch_contributor_stats(self, owner, repo_name, start_date, end_date): """Fetch contributor statistics using GitHub REST API""" @@ -90,9 +88,7 @@ def get_paginated_data(url, params=None): continue if response.status_code != 200: - self.stdout.write( - self.style.WARNING(f"API error: {response.status_code} - {response.text}") - ) + self.stdout.write(self.style.WARNING(f"API error: {response.status_code} - {response.text}")) break data = response.json() @@ -124,9 +120,7 @@ def get_paginated_data(url, params=None): for commit in commits: if commit.get("author") and commit.get("commit", {}).get("author", {}).get("date"): - date = datetime.strptime( - commit["commit"]["author"]["date"], "%Y-%m-%dT%H:%M:%SZ" - ).date() + date = datetime.strptime(commit["commit"]["author"]["date"], "%Y-%m-%dT%H:%M:%SZ").date() login = commit["author"].get("login") if login: self.increment_stat(stats, date, login, "commits") @@ -142,17 +136,13 @@ def get_paginated_data(url, params=None): login = issue["user"]["login"] # Handle issue creation - created_date = datetime.strptime( - issue["created_at"], "%Y-%m-%dT%H:%M:%SZ" - ).date() + created_date = datetime.strptime(issue["created_at"], "%Y-%m-%dT%H:%M:%SZ").date() if start_date <= created_date <= end_date: self.increment_stat(stats, created_date, login, "issues_opened") # Handle issue closure if issue.get("closed_at"): - closed_date = datetime.strptime( - issue["closed_at"], "%Y-%m-%dT%H:%M:%SZ" - ).date() + closed_date = datetime.strptime(issue["closed_at"], "%Y-%m-%dT%H:%M:%SZ").date() if start_date <= closed_date <= end_date: self.increment_stat(stats, closed_date, login, "issues_closed") @@ -176,9 +166,7 @@ def get_paginated_data(url, params=None): for comment in comments: if comment.get("user", {}).get("login"): login = comment["user"]["login"] - comment_date = datetime.strptime( - comment["created_at"], "%Y-%m-%dT%H:%M:%SZ" - ).date() + comment_date = datetime.strptime(comment["created_at"], "%Y-%m-%dT%H:%M:%SZ").date() if start_date <= comment_date <= end_date: self.increment_stat(stats, comment_date, login, "comments") @@ -216,11 +204,7 @@ def update_monthly_stats(self, repo, start_date): owner, repo_name = self.parse_github_url(repo.repo_url) # Get the last monthly stat to know where to start from - last_monthly_stat = ( - ContributorStats.objects.filter(repo=repo, granularity="month") - .order_by("-date") - .first() - ) + last_monthly_stat = ContributorStats.objects.filter(repo=repo, granularity="month").order_by("-date").first() if last_monthly_stat: # Start from the month after the last stored monthly stat @@ -236,15 +220,11 @@ def update_monthly_stats(self, repo, start_date): response = requests.get(repo_api_url, headers=headers) if response.status_code != 200: - self.stdout.write( - self.style.ERROR(f"Failed to fetch repo data: {response.text}") - ) + self.stdout.write(self.style.ERROR(f"Failed to fetch repo data: {response.text}")) return repo_data = response.json() - repo_created_at = datetime.strptime( - repo_data["created_at"], "%Y-%m-%dT%H:%M:%SZ" - ).date() + repo_created_at = datetime.strptime(repo_data["created_at"], "%Y-%m-%dT%H:%M:%SZ").date() except Exception as e: self.stdout.write(self.style.ERROR(f"Error fetching repo creation date: {str(e)}")) return @@ -261,9 +241,7 @@ def 
update_monthly_stats(self, repo, start_date): self.stdout.write(f"Fetching stats for month: {current_month_start} to {month_end}") - monthly_stats = self.fetch_monthly_contributor_stats( - owner, repo_name, current_month_start, month_end - ) + monthly_stats = self.fetch_monthly_contributor_stats(owner, repo_name, current_month_start, month_end) if monthly_stats: self.store_monthly_stats(repo, current_month_start, monthly_stats) @@ -293,9 +271,7 @@ def get_paginated_data(url, params=None): continue if response.status_code != 200: - self.stdout.write( - self.style.WARNING(f"API error: {response.status_code} - {response.text}") - ) + self.stdout.write(self.style.WARNING(f"API error: {response.status_code} - {response.text}")) break data = response.json() @@ -343,9 +319,7 @@ def get_paginated_data(url, params=None): # Process commits for commit in commits: if commit.get("author") and commit.get("commit", {}).get("author", {}).get("date"): - date = datetime.strptime( - commit["commit"]["author"]["date"], "%Y-%m-%dT%H:%M:%SZ" - ).date() + date = datetime.strptime(commit["commit"]["author"]["date"], "%Y-%m-%dT%H:%M:%SZ").date() if month_start <= date <= month_end: login = commit["author"].get("login") if login: @@ -358,16 +332,12 @@ def get_paginated_data(url, params=None): if not login: continue - created_date = datetime.strptime( - issue["created_at"], "%Y-%m-%dT%H:%M:%SZ" - ).date() + created_date = datetime.strptime(issue["created_at"], "%Y-%m-%dT%H:%M:%SZ").date() if month_start <= created_date <= month_end: self.increment_monthly_stat(monthly_stats, login, "issues_opened") if issue.get("closed_at"): - closed_date = datetime.strptime( - issue["closed_at"], "%Y-%m-%dT%H:%M:%SZ" - ).date() + closed_date = datetime.strptime(issue["closed_at"], "%Y-%m-%dT%H:%M:%SZ").date() if month_start <= closed_date <= month_end: self.increment_monthly_stat(monthly_stats, login, "issues_closed") @@ -376,9 +346,7 @@ def get_paginated_data(url, params=None): login = pr.get("user", {}).get("login") if ( login - and month_start - <= datetime.strptime(pr["created_at"], "%Y-%m-%dT%H:%M:%SZ").date() - <= month_end + and month_start <= datetime.strptime(pr["created_at"], "%Y-%m-%dT%H:%M:%SZ").date() <= month_end ): self.increment_monthly_stat(monthly_stats, login, "pull_requests") @@ -415,9 +383,7 @@ def store_monthly_stats(self, repo, month_start, monthly_stats): """Store monthly statistics in the database""" with transaction.atomic(): # Delete existing monthly stat for this month if exists - ContributorStats.objects.filter( - repo=repo, granularity="month", date=month_start - ).delete() + ContributorStats.objects.filter(repo=repo, granularity="month", date=month_start).delete() # Create new monthly stats for login, stats in monthly_stats.items(): diff --git a/website/management/commands/update_faiss.py b/website/management/commands/update_faiss.py index bda01a2df..d3731bdd2 100644 --- a/website/management/commands/update_faiss.py +++ b/website/management/commands/update_faiss.py @@ -25,9 +25,7 @@ def handle(self, *args, **kwargs): # Check if the documents directory exists if not documents_dir.exists(): - self.stdout.write( - self.style.ERROR(f"Documents directory does not exist: {documents_dir}") - ) + self.stdout.write(self.style.ERROR(f"Documents directory does not exist: {documents_dir}")) return None # Load the list of already processed files diff --git a/website/management/commands/update_projects.py b/website/management/commands/update_projects.py index 52503c564..24148113e 100644 --- 
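The `update_contributor_stats` hunks above repeatedly parse GitHub's `%Y-%m-%dT%H:%M:%SZ` timestamps and bucket event counts per contributor per day. A self-contained sketch of that bucketing, with `increment_stat` reimplemented as a stand-in for the command's method of the same name and the commit payloads faked:

```python
# Sketch of the per-day bucketing in update_contributor_stats: parse
# GitHub ISO-8601 timestamps, then count events per (date, login).
from collections import defaultdict
from datetime import datetime

# date -> login -> counters; only "commits" is tracked in this sketch.
stats = defaultdict(lambda: defaultdict(lambda: {"commits": 0}))

def increment_stat(stats, date, login, field):
    stats[date][login][field] += 1

commits = [  # faked API items shaped like the GitHub commits response
    {"author": {"login": "alice"}, "commit": {"author": {"date": "2025-01-07T10:15:00Z"}}},
    {"author": {"login": "alice"}, "commit": {"author": {"date": "2025-01-07T11:20:00Z"}}},
]

for commit in commits:
    if commit.get("author") and commit.get("commit", {}).get("author", {}).get("date"):
        date = datetime.strptime(commit["commit"]["author"]["date"], "%Y-%m-%dT%H:%M:%SZ").date()
        login = commit["author"].get("login")
        if login:
            increment_stat(stats, date, login, "commits")

print(dict(stats))  # {datetime.date(2025, 1, 7): {'alice': {'commits': 2}}}
```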
a/website/management/commands/update_projects.py +++ b/website/management/commands/update_projects.py @@ -67,15 +67,9 @@ def get_issue_count(repo_name, query, headers): return 0 project.open_issues = get_issue_count(repo_name, "type:issue+state:open", headers) - project.closed_issues = get_issue_count( - repo_name, "type:issue+state:closed", headers - ) - project.open_pull_requests = get_issue_count( - repo_name, "type:pr+state:open", headers - ) - project.closed_pull_requests = get_issue_count( - repo_name, "type:pr+state:closed", headers - ) + project.closed_issues = get_issue_count(repo_name, "type:issue+state:closed", headers) + project.open_pull_requests = get_issue_count(repo_name, "type:pr+state:open", headers) + project.closed_pull_requests = get_issue_count(repo_name, "type:pr+state:closed", headers) # Fetch latest release url = f"https://api.github.com/repos/{repo_name}/releases/latest" @@ -85,11 +79,7 @@ def get_issue_count(repo_name, query, headers): project.release_name = release_data.get("name") or release_data.get("tag_name") project.release_datetime = parse_datetime(release_data.get("published_at")) else: - self.stdout.write( - self.style.WARNING( - f"No releases found for {repo_name}: {response.status_code}" - ) - ) + self.stdout.write(self.style.WARNING(f"No releases found for {repo_name}: {response.status_code}")) page = 1 commit_count = 0 @@ -100,15 +90,11 @@ def get_issue_count(repo_name, query, headers): contributors_data = response.json() if not contributors_data: break - commit_count += sum( - contributor.get("contributions", 0) for contributor in contributors_data - ) + commit_count += sum(contributor.get("contributions", 0) for contributor in contributors_data) page += 1 else: self.stdout.write( - self.style.WARNING( - f"Failed to fetch contributors for {repo_name}: {response.status_code}" - ) + self.style.WARNING(f"Failed to fetch contributors for {repo_name}: {response.status_code}") ) break project.commit_count = commit_count @@ -116,9 +102,7 @@ def get_issue_count(repo_name, query, headers): else: self.stdout.write( - self.style.WARNING( - f"Failed to fetch repository data for {repo_name}: {response.status_code}" - ) + self.style.WARNING(f"Failed to fetch repository data for {repo_name}: {response.status_code}") ) continue # Skip to next project diff --git a/website/migrations/0001_initial.py b/website/migrations/0001_initial.py index d07bedaf3..4d04aa2ae 100644 --- a/website/migrations/0001_initial.py +++ b/website/migrations/0001_initial.py @@ -15,9 +15,7 @@ class Migration(migrations.Migration): fields=[ ( "id", - models.AutoField( - auto_created=True, primary_key=True, serialize=False, verbose_name="ID" - ), + models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID"), ), ("when", models.DateTimeField(auto_now_add=True, verbose_name=b"date created")), ], diff --git a/website/migrations/0002_auto_20160828_0116.py b/website/migrations/0002_auto_20160828_0116.py index 468fa5e0c..5fb48f7d4 100644 --- a/website/migrations/0002_auto_20160828_0116.py +++ b/website/migrations/0002_auto_20160828_0116.py @@ -18,9 +18,7 @@ class Migration(migrations.Migration): fields=[ ( "id", - models.AutoField( - auto_created=True, primary_key=True, serialize=False, verbose_name="ID" - ), + models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID"), ), ("url", models.URLField()), ("description", models.TextField()), @@ -29,9 +27,7 @@ class Migration(migrations.Migration): ("modified", 
models.DateTimeField(auto_now=True)), ( "user", - models.ForeignKey( - on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL - ), + models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL), ), ], ), diff --git a/website/migrations/0003_auto_20160831_2326.py b/website/migrations/0003_auto_20160831_2326.py index 5089691f1..82a56c674 100644 --- a/website/migrations/0003_auto_20160831_2326.py +++ b/website/migrations/0003_auto_20160831_2326.py @@ -18,9 +18,7 @@ class Migration(migrations.Migration): fields=[ ( "id", - models.AutoField( - auto_created=True, primary_key=True, serialize=False, verbose_name="ID" - ), + models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID"), ), ("score", models.IntegerField()), ], @@ -33,15 +31,11 @@ class Migration(migrations.Migration): migrations.AddField( model_name="points", name="issue", - field=models.ForeignKey( - on_delete=django.db.models.deletion.CASCADE, to="website.Issue" - ), + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="website.Issue"), ), migrations.AddField( model_name="points", name="user", - field=models.ForeignKey( - on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL - ), + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL), ), ] diff --git a/website/migrations/0004_auto_20160903_2344.py b/website/migrations/0004_auto_20160903_2344.py index cc90af1d8..0c442c17f 100644 --- a/website/migrations/0004_auto_20160903_2344.py +++ b/website/migrations/0004_auto_20160903_2344.py @@ -18,9 +18,7 @@ class Migration(migrations.Migration): fields=[ ( "id", - models.AutoField( - auto_created=True, primary_key=True, serialize=False, verbose_name="ID" - ), + models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID"), ), ("url", models.URLField()), ("prize", models.IntegerField()), @@ -30,9 +28,7 @@ class Migration(migrations.Migration): ("modified", models.DateTimeField(auto_now=True)), ( "user", - models.ForeignKey( - on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL - ), + models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL), ), ], ), diff --git a/website/migrations/0006_auto_20160920_1713.py b/website/migrations/0006_auto_20160920_1713.py index 33b12d493..6669a6705 100644 --- a/website/migrations/0006_auto_20160920_1713.py +++ b/website/migrations/0006_auto_20160920_1713.py @@ -15,9 +15,7 @@ class Migration(migrations.Migration): fields=[ ( "id", - models.AutoField( - auto_created=True, primary_key=True, serialize=False, verbose_name="ID" - ), + models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID"), ), ("name", models.TextField()), ("url", models.URLField()), diff --git a/website/migrations/0014_auto_20161113_1417.py b/website/migrations/0014_auto_20161113_1417.py index c758e5699..5b539f465 100644 --- a/website/migrations/0014_auto_20161113_1417.py +++ b/website/migrations/0014_auto_20161113_1417.py @@ -15,8 +15,6 @@ class Migration(migrations.Migration): migrations.AlterField( model_name="issue", name="screenshot", - field=models.ImageField( - upload_to=b"screenshots", validators=[website.models.validate_image] - ), + field=models.ImageField(upload_to=b"screenshots", validators=[website.models.validate_image]), ), ] diff --git a/website/migrations/0023_invitefriend.py b/website/migrations/0023_invitefriend.py index ee2c9cf4b..316f93cbf 100644 --- 
a/website/migrations/0023_invitefriend.py +++ b/website/migrations/0023_invitefriend.py @@ -18,17 +18,13 @@ class Migration(migrations.Migration): fields=[ ( "id", - models.AutoField( - auto_created=True, primary_key=True, serialize=False, verbose_name="ID" - ), + models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID"), ), ("recipient", models.EmailField(max_length=254)), ("sent", models.DateTimeField(auto_now_add=True, db_index=True)), ( "sender", - models.ForeignKey( - on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL - ), + models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL), ), ], options={ diff --git a/website/migrations/0024_userprofile.py b/website/migrations/0024_userprofile.py index d7661c59e..f7a9ed3f2 100644 --- a/website/migrations/0024_userprofile.py +++ b/website/migrations/0024_userprofile.py @@ -26,9 +26,7 @@ class Migration(migrations.Migration): fields=[ ( "id", - models.AutoField( - auto_created=True, primary_key=True, serialize=False, verbose_name="ID" - ), + models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID"), ), ("user_avatar", models.ImageField(blank=True, null=True, upload_to=b"avatars/")), ( diff --git a/website/migrations/0025_auto_20170605_1909.py b/website/migrations/0025_auto_20170605_1909.py index e82d150ac..1de3e4c4a 100644 --- a/website/migrations/0025_auto_20170605_1909.py +++ b/website/migrations/0025_auto_20170605_1909.py @@ -19,9 +19,7 @@ class Migration(migrations.Migration): fields=[ ( "id", - models.AutoField( - auto_created=True, primary_key=True, serialize=False, verbose_name="ID" - ), + models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID"), ), ("author", models.CharField(max_length=200)), ("author_url", models.CharField(max_length=200)), @@ -41,8 +39,6 @@ class Migration(migrations.Migration): migrations.AlterField( model_name="userprofile", name="user_avatar", - field=models.ImageField( - blank=True, null=True, upload_to=website.models.user_images_path - ), + field=models.ImageField(blank=True, null=True, upload_to=website.models.user_images_path), ), ] diff --git a/website/migrations/0037_auto_20170813_0319.py b/website/migrations/0037_auto_20170813_0319.py index 64add128f..9de6334f0 100644 --- a/website/migrations/0037_auto_20170813_0319.py +++ b/website/migrations/0037_auto_20170813_0319.py @@ -13,8 +13,6 @@ class Migration(migrations.Migration): migrations.AlterField( model_name="userprofile", name="follows", - field=models.ManyToManyField( - blank=True, related_name="follower", to="website.UserProfile" - ), + field=models.ManyToManyField(blank=True, related_name="follower", to="website.UserProfile"), ), ] diff --git a/website/migrations/0044_auto_20170907_1605.py b/website/migrations/0044_auto_20170907_1605.py index 908b63848..2203563ca 100644 --- a/website/migrations/0044_auto_20170907_1605.py +++ b/website/migrations/0044_auto_20170907_1605.py @@ -17,8 +17,6 @@ class Migration(migrations.Migration): migrations.AddField( model_name="userprofile", name="issue_upvoted", - field=models.ManyToManyField( - blank=True, null=True, related_name="upvoted", to="website.Issue" - ), + field=models.ManyToManyField(blank=True, null=True, related_name="upvoted", to="website.Issue"), ), ] diff --git a/website/migrations/0045_auto_20180314_2032.py b/website/migrations/0045_auto_20180314_2032.py index 8a54fad6b..1f393fdbe 100644 --- a/website/migrations/0045_auto_20180314_2032.py +++ 
b/website/migrations/0045_auto_20180314_2032.py @@ -15,9 +15,7 @@ class Migration(migrations.Migration): fields=[ ( "id", - models.AutoField( - auto_created=True, primary_key=True, serialize=False, verbose_name="ID" - ), + models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID"), ), ("address", models.CharField(blank=True, max_length=25, null=True)), ("user", models.CharField(blank=True, max_length=25, null=True)), diff --git a/website/migrations/0046_auto_20180630_0848.py b/website/migrations/0046_auto_20180630_0848.py index d73c48c31..36fedb110 100644 --- a/website/migrations/0046_auto_20180630_0848.py +++ b/website/migrations/0046_auto_20180630_0848.py @@ -60,9 +60,7 @@ class Migration(migrations.Migration): migrations.AlterField( model_name="issue", name="screenshot", - field=models.ImageField( - upload_to="screenshots", validators=[website.models.validate_image] - ), + field=models.ImageField(upload_to="screenshots", validators=[website.models.validate_image]), ), migrations.AlterField( model_name="issue", diff --git a/website/migrations/0047_auto_20200613_0814.py b/website/migrations/0047_auto_20200613_0814.py index 3df33eaf3..c409657b0 100644 --- a/website/migrations/0047_auto_20200613_0814.py +++ b/website/migrations/0047_auto_20200613_0814.py @@ -17,9 +17,7 @@ class Migration(migrations.Migration): fields=[ ( "id", - models.AutoField( - auto_created=True, primary_key=True, serialize=False, verbose_name="ID" - ), + models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID"), ), ("name", models.CharField(blank=True, max_length=25)), ("charge_per_month", models.IntegerField(blank=True)), @@ -48,9 +46,7 @@ class Migration(migrations.Migration): migrations.AddField( model_name="hunt", name="domain", - field=models.ForeignKey( - default=1, on_delete=django.db.models.deletion.CASCADE, to="website.Domain" - ), + field=models.ForeignKey(default=1, on_delete=django.db.models.deletion.CASCADE, to="website.Domain"), preserve_default=False, ), migrations.CreateModel( @@ -58,9 +54,7 @@ class Migration(migrations.Migration): fields=[ ( "id", - models.AutoField( - auto_created=True, primary_key=True, serialize=False, verbose_name="ID" - ), + models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID"), ), ("is_active", models.BooleanField(default=True)), ( diff --git a/website/migrations/0052_auto_20200619_0540.py b/website/migrations/0052_auto_20200619_0540.py index 34ebe8a54..7d691d6f3 100644 --- a/website/migrations/0052_auto_20200619_0540.py +++ b/website/migrations/0052_auto_20200619_0540.py @@ -36,9 +36,7 @@ class Migration(migrations.Migration): fields=[ ( "id", - models.AutoField( - auto_created=True, primary_key=True, serialize=False, verbose_name="ID" - ), + models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID"), ), ("name", models.CharField(max_length=255, unique=True)), ("url", models.URLField()), diff --git a/website/migrations/0053_auto_20200619_0551.py b/website/migrations/0053_auto_20200619_0551.py index 352863f7c..cb5a34c1a 100644 --- a/website/migrations/0053_auto_20200619_0551.py +++ b/website/migrations/0053_auto_20200619_0551.py @@ -17,9 +17,7 @@ class Migration(migrations.Migration): fields=[ ( "id", - models.AutoField( - auto_created=True, primary_key=True, serialize=False, verbose_name="ID" - ), + models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID"), ), ( "role", diff --git 
a/website/migrations/0059_transaction_wallet.py b/website/migrations/0059_transaction_wallet.py index b1146d46d..eb9c6a140 100644 --- a/website/migrations/0059_transaction_wallet.py +++ b/website/migrations/0059_transaction_wallet.py @@ -17,17 +17,13 @@ class Migration(migrations.Migration): fields=[ ( "id", - models.AutoField( - auto_created=True, primary_key=True, serialize=False, verbose_name="ID" - ), + models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID"), ), ("current_balance", models.DecimalField(decimal_places=2, default=0, max_digits=6)), ("created_at", models.DateTimeField(auto_now_add=True)), ( "user", - models.ForeignKey( - on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL - ), + models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL), ), ], ), @@ -36,18 +32,14 @@ class Migration(migrations.Migration): fields=[ ( "id", - models.AutoField( - auto_created=True, primary_key=True, serialize=False, verbose_name="ID" - ), + models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID"), ), ("value", models.DecimalField(decimal_places=2, max_digits=6)), ("running_balance", models.DecimalField(decimal_places=2, max_digits=6)), ("created_at", models.DateTimeField(auto_now_add=True)), ( "wallet", - models.ForeignKey( - on_delete=django.db.models.deletion.CASCADE, to="website.Wallet" - ), + models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="website.Wallet"), ), ], ), diff --git a/website/migrations/0061_payment.py b/website/migrations/0061_payment.py index c76b8c4c9..1e4e61f1a 100644 --- a/website/migrations/0061_payment.py +++ b/website/migrations/0061_payment.py @@ -15,17 +15,13 @@ class Migration(migrations.Migration): fields=[ ( "id", - models.AutoField( - auto_created=True, primary_key=True, serialize=False, verbose_name="ID" - ), + models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID"), ), ("value", models.DecimalField(decimal_places=2, max_digits=6)), ("active", models.BooleanField(default=True)), ( "wallet", - models.ForeignKey( - on_delete=django.db.models.deletion.CASCADE, to="website.Wallet" - ), + models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="website.Wallet"), ), ], ), diff --git a/website/migrations/0066_auto_20200827_1733.py b/website/migrations/0066_auto_20200827_1733.py index 5e18f99ee..c95a55516 100644 --- a/website/migrations/0066_auto_20200827_1733.py +++ b/website/migrations/0066_auto_20200827_1733.py @@ -22,9 +22,7 @@ class Migration(migrations.Migration): fields=[ ( "id", - models.AutoField( - auto_created=True, primary_key=True, serialize=False, verbose_name="ID" - ), + models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID"), ), ( "user", diff --git a/website/migrations/0070_alter_issue_label_issuescreenshot.py b/website/migrations/0070_alter_issue_label_issuescreenshot.py index ed3aaf0c5..c1f5ad8c6 100644 --- a/website/migrations/0070_alter_issue_label_issuescreenshot.py +++ b/website/migrations/0070_alter_issue_label_issuescreenshot.py @@ -50,9 +50,7 @@ class Migration(migrations.Migration): ), ( "issue", - models.ForeignKey( - on_delete=django.db.models.deletion.CASCADE, to="website.issue" - ), + models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="website.issue"), ), ], ), diff --git a/website/migrations/0074_company_company_id_company_managers_domain_managers_and_more.py 
b/website/migrations/0074_company_company_id_company_managers_domain_managers_and_more.py index dcf4b9edc..8c68dbcfd 100644 --- a/website/migrations/0074_company_company_id_company_managers_domain_managers_and_more.py +++ b/website/migrations/0074_company_company_id_company_managers_domain_managers_and_more.py @@ -26,9 +26,7 @@ class Migration(migrations.Migration): migrations.AddField( model_name="company", name="managers", - field=models.ManyToManyField( - related_name="user_companies", to=settings.AUTH_USER_MODEL - ), + field=models.ManyToManyField(related_name="user_companies", to=settings.AUTH_USER_MODEL), ), migrations.AddField( model_name="domain", diff --git a/website/migrations/0077_hunt_banner_huntprize.py b/website/migrations/0077_hunt_banner_huntprize.py index aab0e8412..196f12220 100644 --- a/website/migrations/0077_hunt_banner_huntprize.py +++ b/website/migrations/0077_hunt_banner_huntprize.py @@ -35,9 +35,7 @@ class Migration(migrations.Migration): ("description", models.TextField(blank=True, null=True)), ( "hunt", - models.ForeignKey( - on_delete=django.db.models.deletion.CASCADE, to="website.hunt" - ), + models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="website.hunt"), ), ], ), diff --git a/website/migrations/0079_userprofile_crypto_address_and_more.py b/website/migrations/0079_userprofile_crypto_address_and_more.py index 60e4dbf85..95655ea04 100644 --- a/website/migrations/0079_userprofile_crypto_address_and_more.py +++ b/website/migrations/0079_userprofile_crypto_address_and_more.py @@ -19,15 +19,11 @@ class Migration(migrations.Migration): migrations.AddField( model_name="userprofile", name="subscribed_domains", - field=models.ManyToManyField( - related_name="user_subscribed_domains", to="website.domain" - ), + field=models.ManyToManyField(related_name="user_subscribed_domains", to="website.domain"), ), migrations.AddField( model_name="userprofile", name="subscribed_users", - field=models.ManyToManyField( - related_name="user_subscribed_users", to=settings.AUTH_USER_MODEL - ), + field=models.ManyToManyField(related_name="user_subscribed_users", to=settings.AUTH_USER_MODEL), ), ] diff --git a/website/migrations/0080_alter_issue_team_members.py b/website/migrations/0080_alter_issue_team_members.py index 9cac15112..de1133edb 100644 --- a/website/migrations/0080_alter_issue_team_members.py +++ b/website/migrations/0080_alter_issue_team_members.py @@ -14,8 +14,6 @@ class Migration(migrations.Migration): migrations.AlterField( model_name="issue", name="team_members", - field=models.ManyToManyField( - blank=True, related_name="reportmembers", to=settings.AUTH_USER_MODEL - ), + field=models.ManyToManyField(blank=True, related_name="reportmembers", to=settings.AUTH_USER_MODEL), ), ] diff --git a/website/migrations/0083_alter_invitefriend_options_and_more.py b/website/migrations/0083_alter_invitefriend_options_and_more.py index 7c97970e0..fc4e446a5 100644 --- a/website/migrations/0083_alter_invitefriend_options_and_more.py +++ b/website/migrations/0083_alter_invitefriend_options_and_more.py @@ -34,9 +34,7 @@ class Migration(migrations.Migration): migrations.AddField( model_name="invitefriend", name="recipients", - field=models.ManyToManyField( - blank=True, related_name="received_invites", to=settings.AUTH_USER_MODEL - ), + field=models.ManyToManyField(blank=True, related_name="received_invites", to=settings.AUTH_USER_MODEL), ), migrations.AddField( model_name="invitefriend", diff --git a/website/migrations/0090_alter_domain_managers.py 
b/website/migrations/0090_alter_domain_managers.py index 887a23752..8a44d6fe8 100644 --- a/website/migrations/0090_alter_domain_managers.py +++ b/website/migrations/0090_alter_domain_managers.py @@ -14,8 +14,6 @@ class Migration(migrations.Migration): migrations.AlterField( model_name="domain", name="managers", - field=models.ManyToManyField( - blank=True, related_name="user_domains", to=settings.AUTH_USER_MODEL - ), + field=models.ManyToManyField(blank=True, related_name="user_domains", to=settings.AUTH_USER_MODEL), ), ] diff --git a/website/migrations/0097_contributor_project_contributors.py b/website/migrations/0097_contributor_project_contributors.py index 5fd0bb3d8..c75593ec2 100644 --- a/website/migrations/0097_contributor_project_contributors.py +++ b/website/migrations/0097_contributor_project_contributors.py @@ -32,8 +32,6 @@ class Migration(migrations.Migration): migrations.AddField( model_name="project", name="contributors", - field=models.ManyToManyField( - blank=True, null=True, related_name="projects", to="website.contributor" - ), + field=models.ManyToManyField(blank=True, null=True, related_name="projects", to="website.contributor"), ), ] diff --git a/website/migrations/0101_alter_bid_user.py b/website/migrations/0101_alter_bid_user.py index fed87d9c1..0b647e4fc 100644 --- a/website/migrations/0101_alter_bid_user.py +++ b/website/migrations/0101_alter_bid_user.py @@ -15,8 +15,6 @@ class Migration(migrations.Migration): migrations.AlterField( model_name="bid", name="user", - field=models.ForeignKey( - on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL - ), + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL), ), ] diff --git a/website/migrations/0102_remove_bid_amount_bid_amount_bch_and_more.py b/website/migrations/0102_remove_bid_amount_bid_amount_bch_and_more.py index df170d762..5bce1db41 100644 --- a/website/migrations/0102_remove_bid_amount_bid_amount_bch_and_more.py +++ b/website/migrations/0102_remove_bid_amount_bid_amount_bch_and_more.py @@ -21,8 +21,6 @@ class Migration(migrations.Migration): migrations.AlterField( model_name="project", name="contributors", - field=models.ManyToManyField( - blank=True, related_name="projects", to="website.contributor" - ), + field=models.ManyToManyField(blank=True, related_name="projects", to="website.contributor"), ), ] diff --git a/website/migrations/0103_contribution_bacontoken.py b/website/migrations/0103_contribution_bacontoken.py index c263a75a7..45bbe3a16 100644 --- a/website/migrations/0103_contribution_bacontoken.py +++ b/website/migrations/0103_contribution_bacontoken.py @@ -29,9 +29,7 @@ class Migration(migrations.Migration): ("date_created", models.DateTimeField(auto_now_add=True)), ( "status", - models.CharField( - choices=[("open", "Open"), ("closed", "Closed")], max_length=50 - ), + models.CharField(choices=[("open", "Open"), ("closed", "Closed")], max_length=50), ), ("txid", models.CharField(blank=True, max_length=64, null=True)), ( diff --git a/website/migrations/0126_alter_userprofile_subscribed_domains_and_more.py b/website/migrations/0126_alter_userprofile_subscribed_domains_and_more.py index 44515ca62..cf0f5a708 100644 --- a/website/migrations/0126_alter_userprofile_subscribed_domains_and_more.py +++ b/website/migrations/0126_alter_userprofile_subscribed_domains_and_more.py @@ -14,9 +14,7 @@ class Migration(migrations.Migration): migrations.AlterField( model_name="userprofile", name="subscribed_domains", - field=models.ManyToManyField( - 
blank=True, related_name="user_subscribed_domains", to="website.domain" - ), + field=models.ManyToManyField(blank=True, related_name="user_subscribed_domains", to="website.domain"), ), migrations.AlterField( model_name="userprofile", diff --git a/website/migrations/0144_delete_contributorstats_and_more.py b/website/migrations/0144_delete_contributorstats_and_more.py index 9d664749a..08feda3e2 100644 --- a/website/migrations/0144_delete_contributorstats_and_more.py +++ b/website/migrations/0144_delete_contributorstats_and_more.py @@ -65,8 +65,6 @@ class Migration(migrations.Migration): ), migrations.AddIndex( model_name="contribution", - index=models.Index( - fields=["repository", "created"], name="website_con_reposit_9a7e49_idx" - ), + index=models.Index(fields=["repository", "created"], name="website_con_reposit_9a7e49_idx"), ), ] diff --git a/website/migrations/0155_syning_contributors_tag_20241124_0457.py b/website/migrations/0155_syning_contributors_tag_20241124_0457.py index 77898edac..8acf31594 100644 --- a/website/migrations/0155_syning_contributors_tag_20241124_0457.py +++ b/website/migrations/0155_syning_contributors_tag_20241124_0457.py @@ -9,9 +9,7 @@ def replace_blt_contributors_tag(apps, schema_editor): UserProfile = apps.get_model("website", "UserProfile") # Get or create the "BLT Contributors" tag - blt_contributors_tag, _ = Tag.objects.get_or_create( - name="BLT Contributors", slug="blt_contributors" - ) + blt_contributors_tag, _ = Tag.objects.get_or_create(name="BLT Contributors", slug="blt_contributors") try: # Get the "BLT-Contributors" tag diff --git a/website/migrations/0157_badge_userbadge.py b/website/migrations/0157_badge_userbadge.py index 3af40b377..5a9e958fe 100644 --- a/website/migrations/0157_badge_userbadge.py +++ b/website/migrations/0157_badge_userbadge.py @@ -68,9 +68,7 @@ class Migration(migrations.Migration): ), ( "badge", - models.ForeignKey( - on_delete=django.db.models.deletion.CASCADE, to="website.badge" - ), + models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="website.badge"), ), ( "user", diff --git a/website/migrations/0160_activity_dislike_count_activity_dislikes_and_more.py b/website/migrations/0160_activity_dislike_count_activity_dislikes_and_more.py index fef162d82..0d5c0e68c 100644 --- a/website/migrations/0160_activity_dislike_count_activity_dislikes_and_more.py +++ b/website/migrations/0160_activity_dislike_count_activity_dislikes_and_more.py @@ -43,8 +43,6 @@ class Migration(migrations.Migration): migrations.AddField( model_name="activity", name="likes", - field=models.ManyToManyField( - blank=True, related_name="liked_activities", to=settings.AUTH_USER_MODEL - ), + field=models.ManyToManyField(blank=True, related_name="liked_activities", to=settings.AUTH_USER_MODEL), ), ] diff --git a/website/migrations/0165_add_badge_icons.py b/website/migrations/0165_add_badge_icons.py index b30b5569e..983381428 100644 --- a/website/migrations/0165_add_badge_icons.py +++ b/website/migrations/0165_add_badge_icons.py @@ -89,9 +89,7 @@ def add_badge_icons(apps, schema_editor): print(f"Found image for {badge_data['title']} at {static_icon_path}") # Create the target directory in MEDIA_ROOT (media/badges/) - media_icon_path = os.path.join( - settings.MEDIA_ROOT, "badges", os.path.basename(static_icon_path) - ) + media_icon_path = os.path.join(settings.MEDIA_ROOT, "badges", os.path.basename(static_icon_path)) # Ensure the target directory exists os.makedirs(os.path.dirname(media_icon_path), exist_ok=True) diff --git 
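The `0165_add_badge_icons` migration above builds a `media_icon_path` under `MEDIA_ROOT/badges/` and ensures the directory exists before copying the static icon over. A minimal sketch of that copy step, with `MEDIA_ROOT` faked as a local path (the migration reads it from `django.conf.settings`) and an illustrative source path:

```python
# Sketch of the icon-copy step: mirror a static file into
# MEDIA_ROOT/badges/, creating the target directory first.
import os
import shutil

MEDIA_ROOT = "/tmp/media"  # stand-in for settings.MEDIA_ROOT
static_icon_path = "website/static/img/badges/first_bug.png"  # illustrative path

media_icon_path = os.path.join(MEDIA_ROOT, "badges", os.path.basename(static_icon_path))
os.makedirs(os.path.dirname(media_icon_path), exist_ok=True)
if os.path.exists(static_icon_path):
    shutil.copyfile(static_icon_path, media_icon_path)
```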
a/website/migrations/0168_add_streak_badges.py b/website/migrations/0168_add_streak_badges.py index 0620b798e..3222efc3e 100644 --- a/website/migrations/0168_add_streak_badges.py +++ b/website/migrations/0168_add_streak_badges.py @@ -58,9 +58,7 @@ def add_badge_icons(apps, schema_editor): print(f"Found image for {badge_data['title']} at {static_icon_path}") # Create the target directory in MEDIA_ROOT (media/badges/) - media_icon_path = os.path.join( - settings.MEDIA_ROOT, "badges", os.path.basename(static_icon_path) - ) + media_icon_path = os.path.join(settings.MEDIA_ROOT, "badges", os.path.basename(static_icon_path)) # Ensure the target directory exists os.makedirs(os.path.dirname(media_icon_path), exist_ok=True) diff --git a/website/migrations/0173_challenge.py b/website/migrations/0173_challenge.py new file mode 100644 index 000000000..9d960d612 --- /dev/null +++ b/website/migrations/0173_challenge.py @@ -0,0 +1,56 @@ +# Generated by Django 5.1.3 on 2024-12-18 18:54 + +from django.conf import settings +from django.db import migrations, models + + +class Migration(migrations.Migration): + dependencies = [ + ("website", "0172_merge_20241218_0505"), + migrations.swappable_dependency(settings.AUTH_USER_MODEL), + ] + + operations = [ + migrations.CreateModel( + name="Challenge", + fields=[ + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ("title", models.CharField(max_length=255)), + ("description", models.TextField()), + ( + "challenge_type", + models.CharField( + choices=[("single", "Single User"), ("team", "Team")], + default="single", + max_length=10, + ), + ), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ("points", models.IntegerField(default=0)), + ("progress", models.IntegerField(default=0)), + ("completed", models.BooleanField(default=False)), + ("completed_at", models.DateTimeField(blank=True, null=True)), + ( + "participants", + models.ManyToManyField( + blank=True, + related_name="user_challenges", + to=settings.AUTH_USER_MODEL, + ), + ), + ( + "team_participants", + models.ManyToManyField(blank=True, related_name="team_challenges", to="website.organization"), + ), + ], + ), + ] diff --git a/website/migrations/0173_remove_company_admin_remove_company_integrations_and_more.py b/website/migrations/0173_remove_company_admin_remove_company_integrations_and_more.py index 3ea350f44..370dcbdb3 100644 --- a/website/migrations/0173_remove_company_admin_remove_company_integrations_and_more.py +++ b/website/migrations/0173_remove_company_admin_remove_company_integrations_and_more.py @@ -98,9 +98,7 @@ class Migration(migrations.Migration): ), ( "managers", - models.ManyToManyField( - related_name="user_organizations", to=settings.AUTH_USER_MODEL - ), + models.ManyToManyField(related_name="user_organizations", to=settings.AUTH_USER_MODEL), ), ( "subscription", diff --git a/website/migrations/0174_add_single_user_challenges.py b/website/migrations/0174_add_single_user_challenges.py new file mode 100644 index 000000000..4da724f7e --- /dev/null +++ b/website/migrations/0174_add_single_user_challenges.py @@ -0,0 +1,48 @@ +# Generated by Django 5.1.3 on 2024-12-18 09:39 + +from django.db import migrations + +single_user_challenges = [ + { + "title": "Report 5 IPs", + "description": "Report 5 different suspicious IPs to complete this challenge.", + "challenge_type": "single", + "points": 1, + }, + { + "title": "Report 5 Issues", + "description": "Report 5 
unique issues to complete this challenge.", + "challenge_type": "single", + "points": 1, + }, + { + "title": "Sign in for 5 Days", + "description": "Sign in for 5 consecutive days to complete this challenge.", + "challenge_type": "single", + "points": 1, + }, +] + + +def add_single_user_challenges(apps, schema_editor): + # Get the Challenge model + Challenge = apps.get_model("website", "Challenge") + + # Loop through the challenges and create them + for challenge_data in single_user_challenges: + Challenge.objects.create( + title=challenge_data["title"], + description=challenge_data["description"], + challenge_type=challenge_data["challenge_type"], + points=challenge_data["points"], + ) + + +class Migration(migrations.Migration): + dependencies = [ + ("website", "0173_challenge"), + ] + + operations = [ + migrations.RunPython(add_single_user_challenges), + ] diff --git a/website/migrations/0175_add_team_challenges.py b/website/migrations/0175_add_team_challenges.py new file mode 100644 index 000000000..5ca74b983 --- /dev/null +++ b/website/migrations/0175_add_team_challenges.py @@ -0,0 +1,48 @@ +# Generated by Django 5.1.3 on 2024-12-18 13:12 + +from django.db import migrations + + +def add_team_challenges(apps, schema_editor): + Challenge = apps.get_model("website", "Challenge") + + # Define the team challenges + team_challenges = [ + { + "title": "Report 10 IPs", + "description": "Report 10 different suspicious IPs as a team to complete this challenge.", + "challenge_type": "team", + "points": 1, + }, + { + "title": "Report 10 Issues", + "description": "Report 10 unique issues as a team to complete this challenge.", + "challenge_type": "team", + "points": 1, + }, + { + "title": "All Members Sign in for 5 Days", + "description": "Ensure all team members sign in for 5 consecutive days to complete this challenge.", + "challenge_type": "team", + "points": 1, + }, + ] + + # Insert challenges into the Challenge model + for challenge in team_challenges: + Challenge.objects.create( + title=challenge["title"], + description=challenge["description"], + challenge_type=challenge["challenge_type"], + points=challenge["points"], + ) + + +class Migration(migrations.Migration): + dependencies = [ + ("website", "0174_add_single_user_challenges"), + ] + + operations = [ + migrations.RunPython(add_team_challenges), + ] diff --git a/website/migrations/0176_merge_20241219_0544.py b/website/migrations/0176_merge_20241219_0544.py new file mode 100644 index 000000000..39a997785 --- /dev/null +++ b/website/migrations/0176_merge_20241219_0544.py @@ -0,0 +1,12 @@ +# Generated by Django 5.1.3 on 2024-12-19 05:44 + +from django.db import migrations + + +class Migration(migrations.Migration): + dependencies = [ + ("website", "0173_remove_company_admin_remove_company_integrations_and_more"), + ("website", "0175_add_team_challenges"), + ] + + operations = [] diff --git a/website/migrations/0176_repo_contributor_repo_contributor_count_and_more.py b/website/migrations/0176_repo_contributor_repo_contributor_count_and_more.py index b55ff9feb..cff084745 100644 --- a/website/migrations/0176_repo_contributor_repo_contributor_count_and_more.py +++ b/website/migrations/0176_repo_contributor_repo_contributor_count_and_more.py @@ -12,9 +12,7 @@ class Migration(migrations.Migration): migrations.AddField( model_name="repo", name="contributor", - field=models.ManyToManyField( - blank=True, related_name="repos", to="website.contributor" - ), + field=models.ManyToManyField(blank=True, related_name="repos", to="website.contributor"), 
), migrations.AddField( model_name="repo", diff --git a/website/migrations/0177_alter_challenge_team_participants.py b/website/migrations/0177_alter_challenge_team_participants.py new file mode 100644 index 000000000..b06da6438 --- /dev/null +++ b/website/migrations/0177_alter_challenge_team_participants.py @@ -0,0 +1,17 @@ +# Generated by Django 5.1.3 on 2024-12-19 06:04 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + dependencies = [ + ("website", "0176_merge_20241219_0544"), + ] + + operations = [ + migrations.AlterField( + model_name="challenge", + name="team_participants", + field=models.ManyToManyField(blank=True, related_name="team_challenges", to="website.organization"), + ), + ] diff --git a/website/migrations/0178_merge_20241229_1948.py b/website/migrations/0178_merge_20241229_1948.py new file mode 100644 index 000000000..f930450b9 --- /dev/null +++ b/website/migrations/0178_merge_20241229_1948.py @@ -0,0 +1,12 @@ +# Generated by Django 5.1.3 on 2024-12-29 19:48 + +from django.db import migrations + + +class Migration(migrations.Migration): + dependencies = [ + ("website", "0176_repo_contributor_repo_contributor_count_and_more"), + ("website", "0177_alter_challenge_team_participants"), + ] + + operations = [] diff --git a/website/migrations/0180_merge_0178_merge_20241229_1948_0179_contributorstats.py b/website/migrations/0180_merge_0178_merge_20241229_1948_0179_contributorstats.py new file mode 100644 index 000000000..30025a497 --- /dev/null +++ b/website/migrations/0180_merge_0178_merge_20241229_1948_0179_contributorstats.py @@ -0,0 +1,12 @@ +# Generated by Django 5.1.4 on 2025-01-08 09:04 + +from django.db import migrations + + +class Migration(migrations.Migration): + dependencies = [ + ("website", "0178_merge_20241229_1948"), + ("website", "0179_contributorstats"), + ] + + operations = [] diff --git a/website/migrations/0181_trademarkowner_trademark.py b/website/migrations/0181_trademarkowner_trademark.py new file mode 100644 index 000000000..864b4a284 --- /dev/null +++ b/website/migrations/0181_trademarkowner_trademark.py @@ -0,0 +1,99 @@ +# Generated by Django 5.1.4 on 2025-01-25 19:49 + +import django.db.models.deletion +from django.db import migrations, models + + +class Migration(migrations.Migration): + dependencies = [ + ("website", "0180_rename_project_visit_count_repo_repo_visit_count"), + ] + + operations = [ + migrations.CreateModel( + name="TrademarkOwner", + fields=[ + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ("name", models.CharField(max_length=255)), + ("address1", models.CharField(blank=True, max_length=255, null=True)), + ("address2", models.CharField(blank=True, max_length=255, null=True)), + ("city", models.CharField(blank=True, max_length=100, null=True)), + ("state", models.CharField(blank=True, max_length=100, null=True)), + ("country", models.CharField(blank=True, max_length=100, null=True)), + ("postcode", models.CharField(blank=True, max_length=20, null=True)), + ("owner_type", models.CharField(blank=True, max_length=20, null=True)), + ( + "owner_label", + models.CharField(blank=True, max_length=100, null=True), + ), + ( + "legal_entity_type", + models.CharField(blank=True, max_length=20, null=True), + ), + ( + "legal_entity_type_label", + models.CharField(blank=True, max_length=100, null=True), + ), + ], + ), + migrations.CreateModel( + name="Trademark", + fields=[ + ( + "id", + models.AutoField( + auto_created=True, + 
primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ("keyword", models.CharField(max_length=255)), + ( + "registration_number", + models.CharField(blank=True, max_length=50, null=True), + ), + ( + "serial_number", + models.CharField(blank=True, max_length=50, null=True), + ), + ( + "status_label", + models.CharField(blank=True, max_length=50, null=True), + ), + ("status_code", models.CharField(blank=True, max_length=20, null=True)), + ("status_date", models.DateField(blank=True, null=True)), + ( + "status_definition", + models.CharField(blank=True, max_length=255, null=True), + ), + ("filing_date", models.DateField(blank=True, null=True)), + ("registration_date", models.DateField(blank=True, null=True)), + ("abandonment_date", models.DateField(blank=True, null=True)), + ("expiration_date", models.DateField(blank=True, null=True)), + ("description", models.TextField(blank=True, null=True)), + ( + "organization", + models.ForeignKey( + blank=True, + null=True, + on_delete=django.db.models.deletion.CASCADE, + related_name="trademarks", + to="website.organization", + ), + ), + ( + "owners", + models.ManyToManyField(related_name="trademarks", to="website.trademarkowner"), + ), + ], + ), + ] diff --git a/website/migrations/0183_merge_20250124_0618.py b/website/migrations/0183_merge_20250124_0618.py new file mode 100644 index 000000000..1dec887c1 --- /dev/null +++ b/website/migrations/0183_merge_20250124_0618.py @@ -0,0 +1,12 @@ +# Generated by Django 5.1.4 on 2025-01-24 06:18 + +from django.db import migrations + + +class Migration(migrations.Migration): + dependencies = [ + ("website", "0180_merge_0178_merge_20241229_1948_0179_contributorstats"), + ("website", "0182_project_status"), + ] + + operations = [] diff --git a/website/migrations/0183_slackbotactivity.py b/website/migrations/0183_slackbotactivity.py new file mode 100644 index 000000000..a5cbb2be5 --- /dev/null +++ b/website/migrations/0183_slackbotactivity.py @@ -0,0 +1,58 @@ +# Generated by Django 5.1.3 on 2025-01-24 23:08 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + dependencies = [ + ("website", "0182_project_status"), + ] + + operations = [ + migrations.CreateModel( + name="SlackBotActivity", + fields=[ + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ("workspace_id", models.CharField(max_length=20)), + ( + "workspace_name", + models.CharField(blank=True, max_length=255, null=True), + ), + ( + "activity_type", + models.CharField( + choices=[ + ("team_join", "Team Join"), + ("command", "Slash Command"), + ("message", "Message"), + ("error", "Error"), + ], + max_length=20, + ), + ), + ("user_id", models.CharField(blank=True, max_length=20, null=True)), + ("details", models.JSONField(default=dict)), + ("success", models.BooleanField(default=True)), + ("error_message", models.TextField(blank=True, null=True)), + ("created", models.DateTimeField(auto_now_add=True, db_index=True)), + ], + options={ + "ordering": ["-created"], + "indexes": [ + models.Index( + fields=["workspace_id", "activity_type"], + name="website_sla_workspa_1c714c_idx", + ), + models.Index(fields=["created"], name="website_sla_created_6b716d_idx"), + ], + }, + ), + ] diff --git a/website/migrations/0184_merge_0183_merge_20250124_0618_0183_slackbotactivity.py b/website/migrations/0184_merge_0183_merge_20250124_0618_0183_slackbotactivity.py new file mode 100644 index 000000000..2fb3be0e6 --- /dev/null +++ 
b/website/migrations/0184_merge_0183_merge_20250124_0618_0183_slackbotactivity.py @@ -0,0 +1,12 @@ +# Generated by Django 5.1.4 on 2025-01-26 10:10 + +from django.db import migrations + + +class Migration(migrations.Migration): + dependencies = [ + ("website", "0183_merge_20250124_0618"), + ("website", "0183_slackbotactivity"), + ] + + operations = [] diff --git a/website/migrations/0184_merge_20250125_2005.py b/website/migrations/0184_merge_20250125_2005.py new file mode 100644 index 000000000..77a374567 --- /dev/null +++ b/website/migrations/0184_merge_20250125_2005.py @@ -0,0 +1,12 @@ +# Generated by Django 5.1.4 on 2025-01-25 20:05 + +from django.db import migrations + + +class Migration(migrations.Migration): + dependencies = [ + ("website", "0181_trademarkowner_trademark"), + ("website", "0183_slackbotactivity"), + ] + + operations = [] diff --git a/website/migrations/0185_merge_20250126_1451.py b/website/migrations/0185_merge_20250126_1451.py new file mode 100644 index 000000000..b1a6a9017 --- /dev/null +++ b/website/migrations/0185_merge_20250126_1451.py @@ -0,0 +1,12 @@ +# Generated by Django 5.1.4 on 2025-01-26 14:51 + +from django.db import migrations + + +class Migration(migrations.Migration): + dependencies = [ + ("website", "0184_merge_0183_merge_20250124_0618_0183_slackbotactivity"), + ("website", "0184_merge_20250125_2005"), + ] + + operations = [] diff --git a/website/models.py b/website/models.py index ca319cdd2..49fd378e3 100644 --- a/website/models.py +++ b/website/models.py @@ -256,6 +256,45 @@ def get_or_set_x_url(self, name): pass +class TrademarkOwner(models.Model): + name = models.CharField(max_length=255) + address1 = models.CharField(max_length=255, blank=True, null=True) + address2 = models.CharField(max_length=255, blank=True, null=True) + city = models.CharField(max_length=100, blank=True, null=True) + state = models.CharField(max_length=100, blank=True, null=True) + country = models.CharField(max_length=100, blank=True, null=True) + postcode = models.CharField(max_length=20, blank=True, null=True) + owner_type = models.CharField(max_length=20, blank=True, null=True) + owner_label = models.CharField(max_length=100, blank=True, null=True) + legal_entity_type = models.CharField(max_length=20, blank=True, null=True) + legal_entity_type_label = models.CharField(max_length=100, blank=True, null=True) + + def __str__(self): + return self.name + + +class Trademark(models.Model): + keyword = models.CharField(max_length=255) + registration_number = models.CharField(max_length=50, blank=True, null=True) + serial_number = models.CharField(max_length=50, blank=True, null=True) + status_label = models.CharField(max_length=50, blank=True, null=True) + status_code = models.CharField(max_length=20, blank=True, null=True) + status_date = models.DateField(blank=True, null=True) + status_definition = models.CharField(max_length=255, blank=True, null=True) + filing_date = models.DateField(blank=True, null=True) + registration_date = models.DateField(blank=True, null=True) + abandonment_date = models.DateField(blank=True, null=True) + expiration_date = models.DateField(blank=True, null=True) + description = models.TextField(blank=True, null=True) + owners = models.ManyToManyField(TrademarkOwner, related_name="trademarks") + organization = models.ForeignKey( + Organization, null=True, blank=True, on_delete=models.CASCADE, related_name="trademarks" + ) + + def __str__(self): + return self.keyword + + def validate_image(fieldfile_obj): try: filesize = fieldfile_obj.file.size @@ 
-531,6 +570,9 @@ class Points(models.Model): modified = models.DateTimeField(auto_now=True) reason = models.TextField(null=True, blank=True) + def __str__(self): + return f"{self.user.username} - {self.score} points" + class InviteFriend(models.Model): sender = models.ForeignKey(User, related_name="sent_invites", on_delete=models.CASCADE) @@ -1298,3 +1340,55 @@ class Meta: def __str__(self): return f"{self.contributor.name} in {self.repo.name} " f"on {self.date} [{self.granularity}]" + + +class SlackBotActivity(models.Model): + ACTIVITY_TYPES = [ + ("team_join", "Team Join"), + ("command", "Slash Command"), + ("message", "Message"), + ("error", "Error"), + ] + + workspace_id = models.CharField(max_length=20) + workspace_name = models.CharField(max_length=255, null=True, blank=True) + activity_type = models.CharField(max_length=20, choices=ACTIVITY_TYPES) + user_id = models.CharField(max_length=20, null=True, blank=True) + details = models.JSONField(default=dict) # Stores flexible activity-specific data + success = models.BooleanField(default=True) + error_message = models.TextField(null=True, blank=True) + created = models.DateTimeField(auto_now_add=True, db_index=True) + + class Meta: + ordering = ["-created"] + indexes = [ + models.Index(fields=["workspace_id", "activity_type"]), + models.Index(fields=["created"]), + ] + + def __str__(self): + return f"{self.get_activity_type_display()} in {self.workspace_name} at {self.created}" + + +class Challenge(models.Model): + CHALLENGE_TYPE_CHOICES = [ + ("single", "Single User"), + ("team", "Team"), + ] + + title = models.CharField(max_length=255) + description = models.TextField() + challenge_type = models.CharField(max_length=10, choices=CHALLENGE_TYPE_CHOICES, default="single") + created_at = models.DateTimeField(auto_now_add=True) + updated_at = models.DateTimeField(auto_now=True) + participants = models.ManyToManyField(User, related_name="user_challenges", blank=True) # For single users + team_participants = models.ManyToManyField( + Organization, related_name="team_challenges", blank=True + ) # For team challenges + points = models.IntegerField(default=0) # Points for completing the challenge + progress = models.IntegerField(default=0) # Progress in percentage + completed = models.BooleanField(default=False) + completed_at = models.DateTimeField(null=True, blank=True) + + def __str__(self): + return self.title diff --git a/website/serializers.py b/website/serializers.py index 5e1129213..bd88a7d43 100644 --- a/website/serializers.py +++ b/website/serializers.py @@ -35,11 +35,7 @@ class UserProfileSerializer(serializers.ModelSerializer): """ def get_total_score(self, instance): - score = ( - Points.objects.filter(user=instance.user) - .aggregate(total_score=Sum("score")) - .get("total_score") - ) + score = Points.objects.filter(user=instance.user).aggregate(total_score=Sum("score")).get("total_score") if score is None: return 0 return score diff --git a/website/templates/account/signup.html b/website/templates/account/signup.html index 67639c185..2902971f9 100644 --- a/website/templates/account/signup.html +++ b/website/templates/account/signup.html @@ -19,6 +19,16 @@ {% load i18n %} {% load custom_tags %} {% block natural_content %} + {% include "includes/sidenav.html" %}
@@ -117,6 +127,11 @@
             {% trans "Account SignUp" %}

             {{ form.password2.errors }}
+            {{ form.captcha }}
+            {{ form.captcha.errors }}
             {% if redirect_field_value %}
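The hunk above renders a {{ form.captcha }} field on the signup page. A minimal sketch of a signup form that could back it, assuming django-allauth's SignupForm and django-simple-captcha's CaptchaField (both package choices are assumptions, not confirmed by this diff):

# Hypothetical sketch only: assumes django-allauth and django-simple-captcha
# are installed; neither package choice is confirmed by this diff.
from allauth.account.forms import SignupForm
from captcha.fields import CaptchaField


class SignupFormWithCaptcha(SignupForm):
    # Rendered in the template as {{ form.captcha }}; CaptchaField validates
    # the user's answer automatically when the form is cleaned.
    captcha = CaptchaField()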
diff --git a/website/templates/domain.html b/website/templates/domain.html
 {% endblock style %}
 {% block title %}{{ domain.name }} - Domain Dashboard{% endblock %}
@@ -191,6 +324,77 @@
         ⌨️ {% trans "Closed Issues" %}
         {{ bug_type_2 | length }}
+        Trademark Results For {{ name }}
+        {{ trademarks|length }} Matches Found
+        {% for trademark in trademarks %}
+            {{ trademark.keyword }}
+            Reg No.: {{ trademark.registration_number | default:"N/A" }}
+            Serial Number: {{ trademark.serial_number }}
+            Filing Date: {{ trademark.filing_date | default:"N/A" }}
+            Reg Date: {{ trademark.registration_date | default:"N/A" }}
+            Status Date: {{ trademark.status_date | default:"N/A" }}
+            Status: {{ trademark.status_label }}
+            Description: {{ trademark.description }}
+            {% if trademark.owners %}
+                Owners:
+                {% for owner in trademark.owners.all %}
+                    • {% if owner.name %}{{ owner.name }}{% endif %}{% if owner.city %}, {{ owner.city }}{% endif %}{% if owner.state %}, {{ owner.state }}{% endif %}{% if owner.country %}, {{ owner.country }}{% endif %}
+                {% endfor %}
+            {% endif %}
+        {% endfor %}
         Monthly Report
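The block above loops over a trademarks queryset and each trademark's owners relation. A hedged sketch of a view that could supply that context, using the Trademark model added in website/models.py (the view name, query parameter, and template path are illustrative assumptions, not taken from this diff):

# Illustrative sketch: view name, query parameter, and template path are
# assumptions; only the Trademark model and its "owners" M2M come from this diff.
from django.shortcuts import render

from website.models import Trademark


def trademark_results(request):
    name = request.GET.get("name", "")
    # prefetch_related("owners") keeps the nested
    # {% for owner in trademark.owners.all %} loop from issuing one query per row.
    trademarks = Trademark.objects.filter(keyword__icontains=name).prefetch_related("owners")
    return render(request, "domain.html", {"name": name, "trademarks": trademarks})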
diff --git a/website/templates/includes/header.html b/website/templates/includes/header.html
index 5752c4012..c8f7a2540 100644
--- a/website/templates/includes/header.html
+++ b/website/templates/includes/header.html
@@ -149,7 +149,7 @@
         action="{% url 'search' %}"
         method="get">
-            {% trans "Organizations" %}
+            {% trans "All" %}
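One note on the seed migrations 0174_add_single_user_challenges.py and 0175_add_team_challenges.py: migrations.RunPython is given only a forward function, so neither migration can be unapplied. A sketch of a reversible variant for 0174 (the reverse-side filter is an assumption based on the seeded titles; migrations.RunPython.noop would also work if deleting the rows on rollback is undesirable):

# Sketch of a reversible version of 0174; the reverse function is an
# illustrative addition, not part of the original diff.
from django.db import migrations

single_user_challenges = [
    {"title": "Report 5 IPs", "description": "Report 5 different suspicious IPs to complete this challenge."},
    {"title": "Report 5 Issues", "description": "Report 5 unique issues to complete this challenge."},
    {"title": "Sign in for 5 Days", "description": "Sign in for 5 consecutive days to complete this challenge."},
]


def add_single_user_challenges(apps, schema_editor):
    # Use the historical model so the migration stays valid as models evolve.
    Challenge = apps.get_model("website", "Challenge")
    for data in single_user_challenges:
        Challenge.objects.create(challenge_type="single", points=1, **data)


def remove_single_user_challenges(apps, schema_editor):
    # Delete only the rows this migration seeded, matched by title.
    Challenge = apps.get_model("website", "Challenge")
    Challenge.objects.filter(
        title__in=[c["title"] for c in single_user_challenges],
        challenge_type="single",
    ).delete()


class Migration(migrations.Migration):
    dependencies = [
        ("website", "0173_challenge"),
    ]

    operations = [
        migrations.RunPython(add_single_user_challenges, remove_single_user_challenges),
    ]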