diff --git a/deploy/playbooks/04_cron.yml b/deploy/playbooks/04_cron.yml
index 6143303..2b9f8fc 100644
--- a/deploy/playbooks/04_cron.yml
+++ b/deploy/playbooks/04_cron.yml
@@ -2,9 +2,16 @@
   hosts: intbot_app
   tasks:
-    - name: "Download pretalx data every hour"
+    - name: "Download pretalx data once a day"
       ansible.builtin.cron:
-        name: "Download pretalx data every hour"
+        name: "Download pretalx data once a day at 05:05am"
         minute: "5"
-        hour: "8" # run once a day at 08:05 am
+        hour: "5"
         job: "make prod/cron/pretalx"
+
+    - name: "Download pretix data once a day"
+      ansible.builtin.cron:
+        name: "Download pretix data once a day at 06:05am"
+        minute: "5"
+        hour: "6"
+        job: "make prod/cron/pretix"
diff --git a/deploy/templates/app/Makefile.app.j2 b/deploy/templates/app/Makefile.app.j2
index 5932836..5a21e97 100644
--- a/deploy/templates/app/Makefile.app.j2
+++ b/deploy/templates/app/Makefile.app.j2
@@ -18,5 +18,9 @@ prod/manage:
 
 prod/cron/pretalx:
 	$(MAKE_APP) in-container/manage ARG="download_pretalx_data --event=europython-2025"
+
+prod/cron/pretix:
+	$(MAKE_APP) in-container/manage ARG="download_pretix_data --event=ep2025"
+
 logs:
 	docker compose logs -f
diff --git a/deploy/templates/app/intbot.env.example b/deploy/templates/app/intbot.env.example
index 5904ea9..5046fdc 100644
--- a/deploy/templates/app/intbot.env.example
+++ b/deploy/templates/app/intbot.env.example
@@ -58,3 +58,9 @@ ZAMMAD_GROUP_PROGRAMME="zammad-programme-group-name-goes-here"
 ZAMMAD_GROUP_FINAID="zammad-finaid-group-name-goes-here"
 ZAMMAD_GROUP_SPONSORS="zammad-sponsors-group-name-goes-here"
 ZAMMAD_GROUP_GRANTS="zammad-grants-group-name-goes-here"
+
+# Pretalx
+PRETALX_API_TOKEN="pretalx-api-token"
+
+# Pretix
+PRETIX_API_TOKEN="pretix-api-token"
diff --git a/intbot/core/admin.py b/intbot/core/admin.py
index 5047065..315473f 100644
--- a/intbot/core/admin.py
+++ b/intbot/core/admin.py
@@ -1,6 +1,6 @@
 import json
 
-from core.models import DiscordMessage, PretalxData, Webhook
+from core.models import DiscordMessage, PretalxData, PretixData, Webhook
 from django.contrib import admin
 from django.utils.html import format_html
 
@@ -93,6 +93,33 @@ def pretty_content(self, obj: PretalxData):
     pretty_content.short_description = "Content"
 
 
+class PretixDataAdmin(admin.ModelAdmin):
+    list_display = [
+        "uuid",
+        "resource",
+        "created_at",
+        "modified_at",
+    ]
+    list_filter = [
+        "created_at",
+        "resource",
+    ]
+    readonly_fields = fields = [
+        "uuid",
+        "resource",
+        "pretty_content",
+        "created_at",
+        "modified_at",
+        "processed_at",
+    ]
+
+    def pretty_content(self, obj: PretixData):
+        return format_html("<pre>{}</pre>", json.dumps(obj.content, indent=4))
+
+    pretty_content.short_description = "Content"
+
+
admin.site.register(Webhook, WebhookAdmin)
admin.site.register(DiscordMessage, DiscordMessageAdmin)
admin.site.register(PretalxData, PretalxDataAdmin)
+admin.site.register(PretixData, PretixDataAdmin)
diff --git a/intbot/core/integrations/pretix.py b/intbot/core/integrations/pretix.py
new file mode 100644
index 0000000..2120ee9
--- /dev/null
+++ b/intbot/core/integrations/pretix.py
@@ -0,0 +1,100 @@
+import logging
+from typing import Any
+
+import httpx
+from core.models import PretixData
+from django.conf import settings
+
+logger = logging.getLogger(__name__)
+
+PRETIX_EVENTS = [
+ "2022",
+ "ep2023",
+ "ep2024",
+ "ep2025",
+]
+
+ENDPOINTS = {
+ PretixData.PretixResources.orders: "orders/",
+ PretixData.PretixResources.products: "items/",
+ PretixData.PretixResources.vouchers: "vouchers/",
+}
+
+
+JsonType = dict[str, Any]
+
+
+def get_event_url(event: str) -> str:
+ assert event in PRETIX_EVENTS
+
+ pretix_url = "https://tickets.europython.eu"
+ return f"{pretix_url}/api/v1/organizers/europython/events/{event}/"
+
+
+def fetch_pretix_data(
+ event: str, resource: PretixData.PretixResources
+) -> list[JsonType]:
+ headers = {
+ "Authorization": f"Token {settings.PRETIX_API_TOKEN}",
+ "Content-Type": "application/json",
+ }
+
+ base_url = get_event_url(event)
+ endpoint = ENDPOINTS[resource]
+ url = f"{base_url}{endpoint}"
+
+ # Pretix paginates the output, so we need to make multiple requests and
+ # then merge the pages into one big list
+ results = []
+ page = 0
+
+ # This takes advantage of the fact that the response contains the url of the
+ # next page for as long as there is more data to fetch. On the last page,
+ # the next url is None (falsy), which stops the while loop.
+ while url:
+ page += 1
+ response = httpx.get(url, headers=headers)
+
+ if response.status_code != 200:
+ raise Exception(f"Error {response.status_code}: {response.text}")
+
+ logger.info("Fetching data from %s, page %s", url, page)
+
+ data = response.json()
+ results += data["results"]
+ url = data["next"]
+
+ return results
+
+
+def download_latest_orders(event: str) -> PretixData:
+ data = fetch_pretix_data(event, PretixData.PretixResources.orders)
+
+ pretix_data = PretixData.objects.create(
+ resource=PretixData.PretixResources.orders,
+ content=data,
+ )
+
+ return pretix_data
+
+
+def download_latest_products(event: str) -> PretixData:
+ data = fetch_pretix_data(event, PretixData.PretixResources.products)
+
+ pretix_data = PretixData.objects.create(
+ resource=PretixData.PretixResources.products,
+ content=data,
+ )
+
+ return pretix_data
+
+
+def download_latest_vouchers(event: str) -> PretixData:
+ data = fetch_pretix_data(event, PretixData.PretixResources.vouchers)
+
+ pretix_data = PretixData.objects.create(
+ resource=PretixData.PretixResources.vouchers,
+ content=data,
+ )
+
+ return pretix_data
diff --git a/intbot/core/management/commands/download_pretix_data.py b/intbot/core/management/commands/download_pretix_data.py
new file mode 100644
index 0000000..d0c9c94
--- /dev/null
+++ b/intbot/core/management/commands/download_pretix_data.py
@@ -0,0 +1,32 @@
+from core.integrations.pretix import (
+ PRETIX_EVENTS,
+ download_latest_orders,
+ download_latest_products,
+ download_latest_vouchers,
+)
+from django.core.management.base import BaseCommand
+
+
+class Command(BaseCommand):
+ help = "Downloads latest pretix data"
+
+ def add_arguments(self, parser):
+ # Add keyword argument event
+ parser.add_argument(
+ "--event",
+ choices=PRETIX_EVENTS,
+ help="slug of the event (for example `ep2025`)",
+ required=True,
+ )
+
+ def handle(self, **kwargs):
+ event = kwargs["event"]
+
+ self.stdout.write(f"Downloading latest products from pretix... {event}")
+ download_latest_products(event)
+
+ self.stdout.write(f"Downloading latest vouchers from pretix... {event}")
+ download_latest_vouchers(event)
+
+ self.stdout.write(f"Downloading latest orders from pretix... {event}")
+ download_latest_orders(event)
diff --git a/intbot/core/migrations/0006_add_pretix_data_model.py b/intbot/core/migrations/0006_add_pretix_data_model.py
new file mode 100644
index 0000000..92cdc24
--- /dev/null
+++ b/intbot/core/migrations/0006_add_pretix_data_model.py
@@ -0,0 +1,43 @@
+# Generated by Django 5.1.4 on 2025-04-24 22:08
+
+import uuid
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+ dependencies = [
+ ("core", "0005_add_pretalx_data_model"),
+ ]
+
+ operations = [
+ migrations.CreateModel(
+ name="PretixData",
+ fields=[
+ (
+ "id",
+ models.BigAutoField(
+ auto_created=True,
+ primary_key=True,
+ serialize=False,
+ verbose_name="ID",
+ ),
+ ),
+ ("uuid", models.UUIDField(default=uuid.uuid4)),
+ (
+ "resource",
+ models.CharField(
+ choices=[
+ ("orders", "Orders"),
+ ("products", "Products"),
+ ("vouchers", "Vouchers"),
+ ],
+ max_length=255,
+ ),
+ ),
+ ("content", models.JSONField()),
+ ("created_at", models.DateTimeField(auto_now_add=True)),
+ ("modified_at", models.DateTimeField(auto_now=True)),
+ ("processed_at", models.DateTimeField(blank=True, null=True)),
+ ],
+ ),
+ ]
diff --git a/intbot/core/models.py b/intbot/core/models.py
index 3192ab0..c36735a 100644
--- a/intbot/core/models.py
+++ b/intbot/core/models.py
@@ -87,9 +87,7 @@ class PretalxData(models.Model):
"""
Table to store raw data download from pretalx for later parsing.
- We first download data from pretalx to this table, and then fire a separate
- background task that pulls data from this table and stores in separate
- "business" tables, like "Proposal" or "Speaker".
+ We first download data from pretalx, and later parse the latest JSON payloads.
"""
class PretalxResources(models.TextChoices):
@@ -108,5 +106,38 @@ class PretalxResources(models.TextChoices):
modified_at = models.DateTimeField(auto_now=True)
processed_at = models.DateTimeField(blank=True, null=True)
+ class Meta:
+ verbose_name_plural = "Pretalx Data"
+
+ def __str__(self):
+ return f"{self.uuid}"
+
+
+class PretixData(models.Model):
+ """
+ Table to store raw data downloaded from pretix for later parsing.
+
+ We first download data from pretix, and later parse the latest JSON payloads.
+ """
+
+ class PretixResources(models.TextChoices):
+ orders = "orders", "Orders"
+ products = "products", "Products"
+ vouchers = "vouchers", "Vouchers"
+
+ uuid = models.UUIDField(default=uuid.uuid4)
+ resource = models.CharField(
+ max_length=255,
+ choices=PretixResources.choices,
+ )
+ content = models.JSONField()
+
+ created_at = models.DateTimeField(auto_now_add=True)
+ modified_at = models.DateTimeField(auto_now=True)
+ processed_at = models.DateTimeField(blank=True, null=True)
+
+ class Meta:
+ verbose_name_plural = "Pretix Data"
+
def __str__(self):
return f"{self.uuid}"
diff --git a/intbot/intbot/settings.py b/intbot/intbot/settings.py
index a1d555a..80b5fe6 100644
--- a/intbot/intbot/settings.py
+++ b/intbot/intbot/settings.py
@@ -200,6 +200,9 @@ def get(name) -> str:
# Pretalx
PRETALX_API_TOKEN = get("PRETALX_API_TOKEN")
+# Pretix
+PRETIX_API_TOKEN = get("PRETIX_API_TOKEN")
+
if DJANGO_ENV == "dev":
DEBUG = True
diff --git a/intbot/tests/test_admin.py b/intbot/tests/test_admin.py
index fd537ce..5005369 100644
--- a/intbot/tests/test_admin.py
+++ b/intbot/tests/test_admin.py
@@ -2,7 +2,7 @@
Sanity checks (mostly) if the admin resources are available
"""
-from core.models import DiscordMessage, PretalxData, Webhook
+from core.models import DiscordMessage, PretalxData, PretixData, Webhook
def test_admin_for_webhooks_sanity_check(admin_client):
@@ -34,33 +34,69 @@ def test_admin_for_discordmessages_sanity_check(admin_client):
assert dm.channel_name.encode() in response.content
-def test_admin_list_for_pretalx_data(admin_client):
- """Simple sanity check if the page loads correctly"""
- url = "/admin/core/pretalxdata/"
- pd = PretalxData.objects.create(
- resource=PretalxData.PretalxResources.speakers,
- content={},
- )
- assert pd.uuid
-
- response = admin_client.get(url)
-
- assert response.status_code == 200
- assert str(pd.uuid).encode() in response.content
- assert pd.get_resource_display().encode() in response.content
-
-
-def test_admin_change_for_pretalx_data(admin_client):
- """Simple sanity check if the page loads correctly"""
- url = "/admin/core/pretalxdata/"
- pd = PretalxData.objects.create(
- resource=PretalxData.PretalxResources.speakers,
- content={},
- )
- assert pd.uuid
-
- response = admin_client.get(f"{url}{pd.pk}/change/")
-
- assert response.status_code == 200
- assert str(pd.uuid).encode() in response.content
- assert pd.get_resource_display().encode() in response.content
+class TestPretalxDataAdmin:
+ """This class exists only for namespacing purposes"""
+
+ def test_admin_list_for_pretalx_data(self, admin_client):
+ """Simple sanity check if the page loads correctly"""
+ url = "/admin/core/pretalxdata/"
+ pd = PretalxData.objects.create(
+ resource=PretalxData.PretalxResources.speakers,
+ content={},
+ )
+ assert pd.uuid
+
+ response = admin_client.get(url)
+
+ assert response.status_code == 200
+ assert str(pd.uuid).encode() in response.content
+ assert pd.get_resource_display().encode() in response.content
+
+ def test_admin_change_for_pretalx_data(self, admin_client):
+ """Simple sanity check if the page loads correctly"""
+ url = "/admin/core/pretalxdata/"
+ pd = PretalxData.objects.create(
+ resource=PretalxData.PretalxResources.speakers,
+ content={},
+ )
+ assert pd.uuid
+
+ response = admin_client.get(f"{url}{pd.pk}/change/")
+
+ assert response.status_code == 200
+ assert str(pd.uuid).encode() in response.content
+ assert pd.get_resource_display().encode() in response.content
+
+
+class TestPretixDataAdmin:
+ """This class exists only for namespacing purposes"""
+
+ def test_admin_list_for_pretix_data(self, admin_client):
+ """Simple sanity check if the page loads correctly"""
+ url = "/admin/core/pretixdata/"
+ pd = PretixData.objects.create(
+ resource=PretixData.PretixResources.orders,
+ content={},
+ )
+ assert pd.uuid
+
+ response = admin_client.get(url)
+
+ assert response.status_code == 200
+ assert str(pd.uuid).encode() in response.content
+ assert pd.get_resource_display().encode() in response.content
+
+ def test_admin_change_for_pretix_data(self, admin_client):
+ """Simple sanity check if the page loads correctly"""
+ url = "/admin/core/pretixdata/"
+ pd = PretixData.objects.create(
+ resource=PretixData.PretixResources.orders,
+ content={},
+ )
+ assert pd.uuid
+
+ response = admin_client.get(f"{url}{pd.pk}/change/")
+
+ assert response.status_code == 200
+ assert str(pd.uuid).encode() in response.content
+ assert pd.get_resource_display().encode() in response.content
diff --git a/intbot/tests/test_integrations/test_pretix.py b/intbot/tests/test_integrations/test_pretix.py
new file mode 100644
index 0000000..6113859
--- /dev/null
+++ b/intbot/tests/test_integrations/test_pretix.py
@@ -0,0 +1,200 @@
+import pytest
+import respx
+from core.integrations import pretix
+from core.models import PretixData
+from httpx import Response
+
+
+def orders_pages_generator(url):
+ """
+ Generator to simulate pagination.
+
+ Extracted to a generator because we use it in multiple places
+ """
+ yield Response(
+ 200,
+ json={
+ "results": [
+ {"hello": "world"},
+ ],
+ "next": f"{url}&page=2",
+ },
+ )
+
+ yield Response(
+ 200,
+ json={
+ "results": [
+ {"foo": "bar"},
+ ],
+ # It's important to make this the last page in the tests.
+ # Otherwise it would be an infinite loop :)
+ "next": None,
+ },
+ )
+
+
+def vouchers_pages_generator(url):
+ """
+ Generator to simulate pagination.
+
+ Extracted to a generator because we use it in multiple places
+ """
+ yield Response(
+ 200,
+ json={
+ "results": [
+ {"hello": "world"},
+ ],
+ "next": f"{url}&page=2",
+ },
+ )
+
+ yield Response(
+ 200,
+ json={
+ "results": [
+ {"foo": "bar"},
+ ],
+ # It's important to make this the last page in the tests.
+ # Otherwise it would be an infinite loop :)
+ "next": None,
+ },
+ )
+
+
+def products_pages_generator(url):
+ """
+ Generator to simulate pagination.
+
+ Extracted to a generator because we use it in multiple places
+ """
+ yield Response(
+ 200,
+ json={
+ "results": [
+ {"hello": "world"},
+ ],
+ "next": f"{url}&page=2",
+ },
+ )
+
+ yield Response(
+ 200,
+ json={
+ "results": [
+ {"foo": "bar"},
+ ],
+ # It's important to make this the last page in the tests.
+ # Otherwise it would be an infinite loop :)
+ "next": None,
+ },
+ )
+
+
+@respx.mock
+def test_fetch_orders_from_pretix():
+ url = "https://tickets.europython.eu/api/v1/organizers/europython/events/ep2025/orders/"
+ data = orders_pages_generator(url)
+ respx.get(url).mock(return_value=next(data))
+ respx.get(url + "&page=2").mock(return_value=next(data))
+
+ orders = pretix.fetch_pretix_data(
+ "ep2025",
+ PretixData.PretixResources.orders,
+ )
+
+ assert orders == [
+ {"hello": "world"},
+ {"foo": "bar"},
+ ]
+
+
+@respx.mock
+def test_fetch_vouchers_from_pretix():
+ url = "https://tickets.europython.eu/api/v1/organizers/europython/events/ep2025/vouchers/"
+ data = vouchers_pages_generator(url)
+ respx.get(url).mock(return_value=next(data))
+ respx.get(url + "&page=2").mock(return_value=next(data))
+
+ vouchers = pretix.fetch_pretix_data(
+ "ep2025",
+ PretixData.PretixResources.vouchers,
+ )
+
+ assert vouchers == [
+ {"hello": "world"},
+ {"foo": "bar"},
+ ]
+
+
+@respx.mock
+def test_fetch_products_from_pretix():
+ url = "https://tickets.europython.eu/api/v1/organizers/europython/events/ep2025/items/"
+ data = products_pages_generator(url)
+ respx.get(url).mock(return_value=next(data))
+ respx.get(url + "&page=2").mock(return_value=next(data))
+
+ products = pretix.fetch_pretix_data(
+ "ep2025",
+ PretixData.PretixResources.products,
+ )
+
+ assert products == [
+ {"hello": "world"},
+ {"foo": "bar"},
+ ]
+
+
+@respx.mock
+@pytest.mark.django_db
+def test_download_latest_orders():
+ url = "https://tickets.europython.eu/api/v1/organizers/europython/events/ep2025/orders/"
+ data = orders_pages_generator(url)
+ respx.get(url).mock(return_value=next(data))
+ respx.get(url + "&page=2").mock(return_value=next(data))
+
+ pretix.download_latest_orders("ep2025")
+
+ pd = PretixData.objects.get(resource=PretixData.PretixResources.orders)
+ assert pd.resource == "orders"
+ assert pd.content == [
+ {"hello": "world"},
+ {"foo": "bar"},
+ ]
+
+
+@respx.mock
+@pytest.mark.django_db
+def test_download_latest_products():
+ url = "https://tickets.europython.eu/api/v1/organizers/europython/events/ep2025/items/"
+ data = products_pages_generator(url)
+ respx.get(url).mock(return_value=next(data))
+ respx.get(url + "&page=2").mock(return_value=next(data))
+
+ pretix.download_latest_products("ep2025")
+
+ pd = PretixData.objects.get(resource=PretixData.PretixResources.products)
+ assert pd.resource == "products"
+ assert pd.content == [
+ {"hello": "world"},
+ {"foo": "bar"},
+ ]
+
+
+@respx.mock
+@pytest.mark.django_db
+def test_download_latest_vouchers():
+ url = "https://tickets.europython.eu/api/v1/organizers/europython/events/ep2025/vouchers/"
+ data = vouchers_pages_generator(url)
+ respx.get(url).mock(return_value=next(data))
+ respx.get(url + "&page=2").mock(return_value=next(data))
+
+ pretix.download_latest_vouchers("ep2025")
+
+ pd = PretixData.objects.get(resource=PretixData.PretixResources.vouchers)
+ assert pd.resource == "vouchers"
+ assert pd.content == [
+ {"hello": "world"},
+ {"foo": "bar"},
+ ]
diff --git a/intbot/tests/test_managment_commands.py b/intbot/tests/test_managment_commands.py
new file mode 100644
index 0000000..2db64f5
--- /dev/null
+++ b/intbot/tests/test_managment_commands.py
@@ -0,0 +1,66 @@
+import pytest
+import respx
+from core.models import PretalxData, PretixData
+from django.core.management import call_command
+from httpx import Response
+
+
+@respx.mock
+@pytest.mark.django_db
+def test_download_pretalx_data_command(capsys):
+ for url in [
+ "https://pretalx.com/api/events/europython-2025/submissions/?questions=all",
+ "https://pretalx.com/api/events/europython-2025/speakers/?questions=all",
+ ]:
+ respx.get(url).mock(
+ return_value=Response(200, json={"results": [], "next": None})
+ )
+
+ call_command("download_pretalx_data", event="europython-2025")
+
+ # Minimal sanity checks
+ stdout, stderr = capsys.readouterr() # capture stdout / stderr
+ assert "Downloading latest speakers" in stdout
+ assert "Downloading latest submissions" in stdout
+ assert (
+ PretalxData.objects.get(
+ resource=PretalxData.PretalxResources.submissions
+ ).content
+ == []
+ )
+ assert (
+ PretalxData.objects.get(resource=PretalxData.PretalxResources.speakers).content
+ == []
+ )
+
+
+@respx.mock
+@pytest.mark.django_db
+def test_download_pretix_data_command(capsys):
+ for url in [
+ "https://tickets.europython.eu/api/v1/organizers/europython/events/ep2025/orders/",
+ "https://tickets.europython.eu/api/v1/organizers/europython/events/ep2025/items/",
+ "https://tickets.europython.eu/api/v1/organizers/europython/events/ep2025/vouchers/",
+ ]:
+ respx.get(url).mock(
+ return_value=Response(200, json={"results": [], "next": None})
+ )
+
+ call_command("download_pretix_data", event="ep2025")
+
+ # Minimal sanity checks
+ stdout, stderr = capsys.readouterr() # capture stdout / stderr
+ assert "Downloading latest products" in stdout
+ assert "Downloading latest vouchers" in stdout
+ assert "Downloading latest orders" in stdout
+ assert (
+ PretixData.objects.get(resource=PretixData.PretixResources.orders).content == []
+ )
+ assert (
+ PretixData.objects.get(resource=PretixData.PretixResources.vouchers).content
+ == []
+ )
+ assert (
+ PretixData.objects.get(resource=PretixData.PretixResources.products).content
+ == []
+ )
diff --git a/pyproject.toml b/pyproject.toml
index c7d0330..0e8e151 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -25,6 +25,7 @@ dependencies = [
"respx>=0.22.0",
"pydantic>=2.10.6",
"freezegun>=1.5.1",
+ "ipython>=9.1.0",
]
[tool.pytest.ini_options]
diff --git a/uv.lock b/uv.lock
index bbe66b9..9c8553d 100644
--- a/uv.lock
+++ b/uv.lock
@@ -86,6 +86,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/39/e3/893e8757be2612e6c266d9bb58ad2e3651524b5b40cf56761e985a28b13e/asgiref-3.8.1-py3-none-any.whl", hash = "sha256:3e1e3ecc849832fe52ccf2cb6686b7a55f82bb1d6aee72a58826471390335e47", size = 23828 },
]
+[[package]]
+name = "asttokens"
+version = "3.0.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/4a/e7/82da0a03e7ba5141f05cce0d302e6eed121ae055e0456ca228bf693984bc/asttokens-3.0.0.tar.gz", hash = "sha256:0dcd8baa8d62b0c1d118b399b2ddba3c4aff271d0d7a9e0d4c1681c79035bbc7", size = 61978 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/25/8a/c46dcc25341b5bce5472c718902eb3d38600a903b14fa6aeecef3f21a46f/asttokens-3.0.0-py3-none-any.whl", hash = "sha256:e3078351a059199dd5138cb1c706e6430c05eff2ff136af5eb4790f9d28932e2", size = 26918 },
+]
+
[[package]]
name = "attrs"
version = "24.3.0"
@@ -131,6 +140,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/c3/54/de0893186a221478f5880283119fc40483bc460b27c4c71d1b8bba3474b9/coverage-7.6.10-cp312-cp312-win_amd64.whl", hash = "sha256:e4ae5ac5e0d1e4edfc9b4b57b4cbecd5bc266a6915c500f358817a8496739247", size = 211692 },
]
+[[package]]
+name = "decorator"
+version = "5.2.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/43/fa/6d96a0978d19e17b68d634497769987b16c8f4cd0a7a05048bec693caa6b/decorator-5.2.1.tar.gz", hash = "sha256:65f266143752f734b0a7cc83c46f4618af75b8c5911b00ccb61d0ac9b6da0360", size = 56711 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/4e/8c/f3147f5c4b73e7550fe5f9352eaa956ae838d5c51eb58e7a25b9f3e2643b/decorator-5.2.1-py3-none-any.whl", hash = "sha256:d316bb415a2d9e2d2b3abcc4084c6502fc09240e292cd76a76afc106a1c8e04a", size = 9190 },
+]
+
[[package]]
name = "discord-py"
version = "2.4.0"
@@ -212,6 +230,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/bf/a3/f9c6634f67c5bcd309b17fe756ee4a321779806ab515e7ccc6333a439275/django_tasks-0.6.1-py3-none-any.whl", hash = "sha256:b3648e28bdcda809cb7831f3aff98aa46c327025447c462b8943cce9dfbb0281", size = 36330 },
]
+[[package]]
+name = "executing"
+version = "2.2.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/91/50/a9d80c47ff289c611ff12e63f7c5d13942c65d68125160cefd768c73e6e4/executing-2.2.0.tar.gz", hash = "sha256:5d108c028108fe2551d1a7b2e8b713341e2cb4fc0aa7dcf966fa4327a5226755", size = 978693 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/7b/8f/c4d9bafc34ad7ad5d8dc16dd1347ee0e507a52c3adb6bfa8887e1c6a26ba/executing-2.2.0-py2.py3-none-any.whl", hash = "sha256:11387150cad388d62750327a53d3339fad4888b39a6fe233c3afbb54ecffd3aa", size = 26702 },
+]
+
[[package]]
name = "fancycompleter"
version = "0.9.1"
@@ -341,6 +368,7 @@ dependencies = [
{ name = "freezegun" },
{ name = "gunicorn" },
{ name = "httpx" },
+ { name = "ipython" },
{ name = "mypy" },
{ name = "pdbpp" },
{ name = "psycopg" },
@@ -365,6 +393,7 @@ requires-dist = [
{ name = "freezegun", specifier = ">=1.5.1" },
{ name = "gunicorn", specifier = ">=23.0.0" },
{ name = "httpx", specifier = ">=0.28.1" },
+ { name = "ipython", specifier = ">=9.1.0" },
{ name = "mypy", specifier = ">=1.14.1" },
{ name = "pdbpp", specifier = ">=0.10.3" },
{ name = "psycopg", specifier = ">=3.2.3" },
@@ -379,6 +408,63 @@ requires-dist = [
{ name = "whitenoise", specifier = ">=6.8.2" },
]
+[[package]]
+name = "ipython"
+version = "9.1.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "colorama", marker = "sys_platform == 'win32'" },
+ { name = "decorator" },
+ { name = "ipython-pygments-lexers" },
+ { name = "jedi" },
+ { name = "matplotlib-inline" },
+ { name = "pexpect", marker = "sys_platform != 'emscripten' and sys_platform != 'win32'" },
+ { name = "prompt-toolkit" },
+ { name = "pygments" },
+ { name = "stack-data" },
+ { name = "traitlets" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/70/9a/6b8984bedc990f3a4aa40ba8436dea27e23d26a64527de7c2e5e12e76841/ipython-9.1.0.tar.gz", hash = "sha256:a47e13a5e05e02f3b8e1e7a0f9db372199fe8c3763532fe7a1e0379e4e135f16", size = 4373688 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/b2/9d/4ff2adf55d1b6e3777b0303fdbe5b723f76e46cba4a53a32fe82260d2077/ipython-9.1.0-py3-none-any.whl", hash = "sha256:2df07257ec2f84a6b346b8d83100bcf8fa501c6e01ab75cd3799b0bb253b3d2a", size = 604053 },
+]
+
+[[package]]
+name = "ipython-pygments-lexers"
+version = "1.1.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "pygments" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/ef/4c/5dd1d8af08107f88c7f741ead7a40854b8ac24ddf9ae850afbcf698aa552/ipython_pygments_lexers-1.1.1.tar.gz", hash = "sha256:09c0138009e56b6854f9535736f4171d855c8c08a563a0dcd8022f78355c7e81", size = 8393 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/d9/33/1f075bf72b0b747cb3288d011319aaf64083cf2efef8354174e3ed4540e2/ipython_pygments_lexers-1.1.1-py3-none-any.whl", hash = "sha256:a9462224a505ade19a605f71f8fa63c2048833ce50abc86768a0d81d876dc81c", size = 8074 },
+]
+
+[[package]]
+name = "jedi"
+version = "0.19.2"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "parso" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/72/3a/79a912fbd4d8dd6fbb02bf69afd3bb72cf0c729bb3063c6f4498603db17a/jedi-0.19.2.tar.gz", hash = "sha256:4770dc3de41bde3966b02eb84fbcf557fb33cce26ad23da12c742fb50ecb11f0", size = 1231287 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/c0/5a/9cac0c82afec3d09ccd97c8b6502d48f165f9124db81b4bcb90b4af974ee/jedi-0.19.2-py2.py3-none-any.whl", hash = "sha256:a8ef22bde8490f57fe5c7681a3c83cb58874daf72b4784de3cce5b6ef6edb5b9", size = 1572278 },
+]
+
+[[package]]
+name = "matplotlib-inline"
+version = "0.1.7"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "traitlets" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/99/5b/a36a337438a14116b16480db471ad061c36c3694df7c2084a0da7ba538b7/matplotlib_inline-0.1.7.tar.gz", hash = "sha256:8423b23ec666be3d16e16b60bdd8ac4e86e840ebd1dd11a30b9f117f2fa0ab90", size = 8159 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/8f/8e/9ad090d3553c280a8060fbf6e24dc1c0c29704ee7d1c372f0c174aa59285/matplotlib_inline-0.1.7-py3-none-any.whl", hash = "sha256:df192d39a4ff8f21b1895d72e6a13f5fcc5099f00fa84384e0ea28c2cc0653ca", size = 9899 },
+]
+
[[package]]
name = "multidict"
version = "6.1.0"
@@ -440,6 +526,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/88/ef/eb23f262cca3c0c4eb7ab1933c3b1f03d021f2c48f54763065b6f0e321be/packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759", size = 65451 },
]
+[[package]]
+name = "parso"
+version = "0.8.4"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/66/94/68e2e17afaa9169cf6412ab0f28623903be73d1b32e208d9e8e541bb086d/parso-0.8.4.tar.gz", hash = "sha256:eb3a7b58240fb99099a345571deecc0f9540ea5f4dd2fe14c2a99d6b281ab92d", size = 400609 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/c6/ac/dac4a63f978e4dcb3c6d3a78c4d8e0192a113d288502a1216950c41b1027/parso-0.8.4-py2.py3-none-any.whl", hash = "sha256:a418670a20291dacd2dddc80c377c5c3791378ee1e8d12bffc35420643d43f18", size = 103650 },
+]
+
[[package]]
name = "pdbpp"
version = "0.10.3"
@@ -454,6 +549,18 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/93/ee/491e63a57fffa78b9de1c337b06c97d0cd0753e88c00571c7b011680332a/pdbpp-0.10.3-py2.py3-none-any.whl", hash = "sha256:79580568e33eb3d6f6b462b1187f53e10cd8e4538f7d31495c9181e2cf9665d1", size = 23961 },
]
+[[package]]
+name = "pexpect"
+version = "4.9.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "ptyprocess" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/42/92/cc564bf6381ff43ce1f4d06852fc19a2f11d180f23dc32d9588bee2f149d/pexpect-4.9.0.tar.gz", hash = "sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f", size = 166450 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/9e/c3/059298687310d527a58bb01f3b1965787ee3b40dce76752eda8b44e9a2c5/pexpect-4.9.0-py2.py3-none-any.whl", hash = "sha256:7236d1e080e4936be2dc3e326cec0af72acf9212a7e1d060210e70a47e253523", size = 63772 },
+]
+
[[package]]
name = "pluggy"
version = "1.5.0"
@@ -463,6 +570,18 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/88/5f/e351af9a41f866ac3f1fac4ca0613908d9a41741cfcf2228f4ad853b697d/pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669", size = 20556 },
]
+[[package]]
+name = "prompt-toolkit"
+version = "3.0.51"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "wcwidth" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/bb/6e/9d084c929dfe9e3bfe0c6a47e31f78a25c54627d64a66e884a8bf5474f1c/prompt_toolkit-3.0.51.tar.gz", hash = "sha256:931a162e3b27fc90c86f1b48bb1fb2c528c2761475e57c9c06de13311c7b54ed", size = 428940 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/ce/4f/5249960887b1fbe561d9ff265496d170b55a735b76724f10ef19f9e40716/prompt_toolkit-3.0.51-py3-none-any.whl", hash = "sha256:52742911fde84e2d423e2f9a4cf1de7d7ac4e51958f648d9540e0fb8db077b07", size = 387810 },
+]
+
[[package]]
name = "propcache"
version = "0.2.1"
@@ -501,6 +620,24 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/ce/21/534b8f5bd9734b7a2fcd3a16b1ee82ef6cad81a4796e95ebf4e0c6a24119/psycopg-3.2.3-py3-none-any.whl", hash = "sha256:644d3973fe26908c73d4be746074f6e5224b03c1101d302d9a53bf565ad64907", size = 197934 },
]
+[[package]]
+name = "ptyprocess"
+version = "0.7.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/20/e5/16ff212c1e452235a90aeb09066144d0c5a6a8c0834397e03f5224495c4e/ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220", size = 70762 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/22/a6/858897256d0deac81a172289110f31629fc4cee19b6f01283303e18c8db3/ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35", size = 13993 },
+]
+
+[[package]]
+name = "pure-eval"
+version = "0.2.3"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/cd/05/0a34433a064256a578f1783a10da6df098ceaa4a57bbeaa96a6c0352786b/pure_eval-0.2.3.tar.gz", hash = "sha256:5f4e983f40564c576c7c8635ae88db5956bb2229d7e9237d03b3c0b0190eaf42", size = 19752 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/8e/37/efad0257dc6e593a18957422533ff0f87ede7c9c6ea010a2177d738fb82f/pure_eval-0.2.3-py3-none-any.whl", hash = "sha256:1db8e35b67b3d218d818ae653e27f06c3aa420901fa7b081ca98cbedc874e0d0", size = 11842 },
+]
+
[[package]]
name = "pydantic"
version = "2.10.6"
@@ -701,6 +838,29 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/a9/5c/bfd6bd0bf979426d405cc6e71eceb8701b148b16c21d2dc3c261efc61c7b/sqlparse-0.5.3-py3-none-any.whl", hash = "sha256:cf2196ed3418f3ba5de6af7e82c694a9fbdbfecccdfc72e281548517081f16ca", size = 44415 },
]
+[[package]]
+name = "stack-data"
+version = "0.6.3"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "asttokens" },
+ { name = "executing" },
+ { name = "pure-eval" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/28/e3/55dcc2cfbc3ca9c29519eb6884dd1415ecb53b0e934862d3559ddcb7e20b/stack_data-0.6.3.tar.gz", hash = "sha256:836a778de4fec4dcd1dcd89ed8abff8a221f58308462e1c4aa2a3cf30148f0b9", size = 44707 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/f1/7b/ce1eafaf1a76852e2ec9b22edecf1daa58175c090266e9f6c64afcd81d91/stack_data-0.6.3-py3-none-any.whl", hash = "sha256:d5558e0c25a4cb0853cddad3d77da9891a08cb85dd9f9f91b9f8cd66e511e695", size = 24521 },
+]
+
+[[package]]
+name = "traitlets"
+version = "5.14.3"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/eb/79/72064e6a701c2183016abbbfedaba506d81e30e232a68c9f0d6f6fcd1574/traitlets-5.14.3.tar.gz", hash = "sha256:9ed0579d3502c94b4b3732ac120375cda96f923114522847de4b3bb98b96b6b7", size = 161621 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/00/c0/8f5d070730d7836adc9c9b6408dec68c6ced86b304a9b26a14df072a6e8c/traitlets-5.14.3-py3-none-any.whl", hash = "sha256:b74e89e397b1ed28cc831db7aea759ba6640cb3de13090ca145426688ff1ac4f", size = 85359 },
+]
+
[[package]]
name = "types-pyyaml"
version = "6.0.12.20241230"
@@ -728,6 +888,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/a6/ab/7e5f53c3b9d14972843a647d8d7a853969a58aecc7559cb3267302c94774/tzdata-2024.2-py2.py3-none-any.whl", hash = "sha256:a48093786cdcde33cad18c2555e8532f34422074448fbc874186f0abd79565cd", size = 346586 },
]
+[[package]]
+name = "wcwidth"
+version = "0.2.13"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/6c/63/53559446a878410fc5a5974feb13d31d78d752eb18aeba59c7fef1af7598/wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5", size = 101301 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/fd/84/fd2ba7aafacbad3c4201d395674fc6348826569da3c0937e75505ead3528/wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859", size = 34166 },
+]
+
[[package]]
name = "whitenoise"
version = "6.8.2"