diff --git a/.github/workflows/deploy-dev.yml b/.github/workflows/deploy-dev.yml
new file mode 100644
index 0000000..334d04e
--- /dev/null
+++ b/.github/workflows/deploy-dev.yml
@@ -0,0 +1,53 @@
+name: Dev deploy to EC2 on Push
+
+on:
+  push:
+    branches: [dev]
+
+env:
+  AWS_REGION: "us-east-1"
+
+# Permissions can be added at job level or workflow level
+permissions:
+  id-token: write # This is required for requesting the JWT
+  contents: read # This is required for actions/checkout
+jobs:
+  DeployToCodeDeploy:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Git clone the repository
+        uses: actions/checkout@v3
+
+      - name: Configure AWS credentials
+        uses: aws-actions/configure-aws-credentials@v1.7.0
+        with:
+          role-to-assume: arn:aws:iam::471112976510:role/GitHubAction-AssumeRoleWithAction
+          role-session-name: GitHub_to_AWS_via_FederatedOIDC
+          aws-region: ${{ env.AWS_REGION }}
+
+      - name: Generate appspec.yml for dev
+        run: cp appspec-dev.yml appspec.yml
+
+      - name: Set environment variables
+        id: vars
+        run: |
+          # Assign in the shell first: values written to $GITHUB_ENV only become available in subsequent steps
+          DATETIME=$(date +'%Y-%m-%d_%H-%M-%S')
+          echo "DATETIME=${DATETIME}" >> $GITHUB_ENV
+          echo "FILENAME=django-indexer-dev-${DATETIME}.zip" >> $GITHUB_ENV
+          echo "S3_BUCKET=django-indexer-dev" >> $GITHUB_ENV
+
+      - name: Create zip of repository
+        run: zip -r "${{ env.FILENAME }}" .
+
+      - name: Upload repository to S3
+        run: aws s3 cp "${{ env.FILENAME }}" "s3://${{ env.S3_BUCKET }}/"
+
+      - name: Create CodeDeploy Deployment
+        id: deploy
+        run: |
+          aws deploy create-deployment \
+            --application-name django-indexer \
+            --deployment-group-name django-indexer-dev-group \
+            --deployment-config-name CodeDeployDefault.AllAtOnce \
+            --s3-location bucket=${{ env.S3_BUCKET }},bundleType=zip,key=${{ env.FILENAME }}
diff --git a/.github/workflows/deploy-testnet.yml b/.github/workflows/deploy-testnet.yml
new file mode 100644
index 0000000..d22df3c
--- /dev/null
+++ b/.github/workflows/deploy-testnet.yml
@@ -0,0 +1,68 @@
+name: Testnet deploy to EC2 on Push
+
+on:
+  push:
+    branches: [testnet]
+
+env:
+  AWS_REGION: "us-east-1"
+
+# Permissions can be added at job level or workflow level
+permissions:
+  id-token: write # This is required for requesting the JWT
+  contents: read # This is required for actions/checkout
+jobs:
+  DeployToCodeDeploy:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Git clone the repository
+        uses: actions/checkout@v3
+
+      - name: Configure AWS credentials
+        uses: aws-actions/configure-aws-credentials@v1.7.0
+        with:
+          role-to-assume: arn:aws:iam::471112976510:role/GitHubAction-AssumeRoleWithAction
+          role-session-name: GitHub_to_AWS_via_FederatedOIDC
+          aws-region: ${{ env.AWS_REGION }}
+
+      - name: Generate appspec.yml for testnet
+        run: cp appspec-testnet.yml appspec.yml
+
+      - name: Set environment variables
+        id: vars
+        run: |
+          # Assign in the shell first: values written to $GITHUB_ENV only become available in subsequent steps
+          DATETIME=$(date +'%Y-%m-%d_%H-%M-%S')
+          echo "DATETIME=${DATETIME}" >> $GITHUB_ENV
+          echo "FILENAME=django-indexer-testnet-${DATETIME}.zip" >> $GITHUB_ENV
+          echo "S3_BUCKET=django-indexer-testnet" >> $GITHUB_ENV
+
+      - name: Create zip of repository
+        run: zip -r "${{ env.FILENAME }}" .
+
+      - name: Upload repository to S3
+        run: aws s3 cp "${{ env.FILENAME }}" "s3://${{ env.S3_BUCKET }}/"
+
+      - name: Create CodeDeploy Deployment
+        id: deploy
+        run: |
+          aws deploy create-deployment \
+            --application-name django-indexer-testnet \
+            --deployment-group-name django-indexer-testnet-group \
+            --deployment-config-name CodeDeployDefault.AllAtOnce \
+            --s3-location bucket=${{ env.S3_BUCKET }},bundleType=zip,key=${{ env.FILENAME }}
+
+      # - name: Create zip of repository
+      #   run: zip -r django-indexer-testnet.zip .
+
+      # - name: Upload repository to S3
+      #   run: aws s3 cp django-indexer-testnet.zip s3://django-indexer-testnet/
+
+      # - name: Create CodeDeploy Deployment
+      #   id: deploy
+      #   run: |
+      #     aws deploy create-deployment \
+      #       --application-name django-indexer-testnet \
+      #       --deployment-group-name django-indexer-testnet-group \
+      #       --deployment-config-name CodeDeployDefault.AllAtOnce \
+      #       --s3-location bucket=django-indexer-testnet,bundleType=zip,key=django-indexer-testnet.zip
diff --git a/README.md b/README.md
index e04c4f7..fc89206 100644
--- a/README.md
+++ b/README.md
@@ -3,10 +3,10 @@
 - [Steps to run:](#steps-to-run)
 - [Env vars example](#env-vars-example)
 - [API Basics](#api-basics)
-    - [Base URL](#base-url)
-    - [Authorization](#authorization)
-    - [Error Responses](#error-responses)
-    - [Pagination](#pagination)
+  - [Base URL](#base-url)
+  - [Authorization](#authorization)
+  - [Error Responses](#error-responses)
+  - [Pagination](#pagination)
 - [API Endpoints](#api-endpoints)
   - [`Account` endpoints](#account-endpoints)
     - [✅ Get all accounts: `GET /accounts` (paginated)](#-get-all-accounts-get-accounts-paginated)
@@ -52,6 +52,7 @@
   - If for some reason this doesn't kill any active celery tasks, run `ps auxww | grep 'celery' | grep -v grep` and kill resulting PIDs
 
 Extra commands that might come in useful:
+
 - Purge celery queue (`celery -A base purge`)
 
 ### Env vars example
@@ -77,13 +78,14 @@ export PL_SENTRY_DSN=
 
 #### Base URL
 
-`/api/v1/`
+**dev (mainnet):** `https://dev.potlock.io/api/v1/`
+**testnet:** `https://test-dev.potlock.io/api/v1/`
 
 #### Authorization
 
 This is a public, read-only API and as such does not currently implement authentication or authorization.
 
-Rate limits of (FILL THIS IN) are enforced to ensure service for all users.
+Rate limits of 100 requests/min are enforced to ensure service for all users.
 
 #### Error Responses
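The README changes above document the hosted base URLs, the anonymous throttle rate, and limit/offset pagination (all configured in `base/settings.py` later in this diff). A minimal client sketch against the dev API — the `/accounts` path comes from the endpoint list above; the response envelope is DRF's standard `LimitOffsetPagination` shape, and the `id` field is assumed from the `Account` model:

```python
import requests

BASE_URL = "https://dev.potlock.io/api/v1"

# First page of accounts; LimitOffsetPagination accepts `limit` and `offset`
resp = requests.get(f"{BASE_URL}/accounts", params={"limit": 30, "offset": 0})
resp.raise_for_status()

data = resp.json()
print(data["count"])  # total number of accounts
for account in data["results"]:
    print(account["id"])  # account address

# Anonymous traffic is throttled at 100 requests/minute; expect HTTP 429 beyond that.
```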
diff --git a/accounts/admin.py b/accounts/admin.py
index fb44fbe..3b8aca7 100644
--- a/accounts/admin.py
+++ b/accounts/admin.py
@@ -1,25 +1,52 @@
 from django.contrib import admin
+
 from .models import Account
 
+
 @admin.register(Account)
 class AccountAdmin(admin.ModelAdmin):
-    list_display = ('id', 'total_donations_in_usd', 'total_donations_out_usd', 'total_matching_pool_allocations_usd', 'donors_count')
-    search_fields = ('id',)  # Allow searching by account address
-    list_filter = ('total_donations_in_usd', 'total_donations_out_usd')  # Filter by donation amounts
-    ordering = ('-total_donations_in_usd',)  # Default ordering
+    list_display = (
+        "id",
+        "total_donations_in_usd",
+        "total_donations_out_usd",
+        "total_matching_pool_allocations_usd",
+        "donors_count",
+    )
+    search_fields = ("id",)  # Allow searching by account address
+    list_filter = (
+        "total_donations_in_usd",
+        "total_donations_out_usd",
+    )  # Filter by donation amounts
+    ordering = ("-total_donations_in_usd",)  # Default ordering
 
     # Optionally, format decimal fields for better readability in the admin
     def total_donations_in_usd_display(self, obj):
         return "${:,.2f}".format(obj.total_donations_in_usd)
-    total_donations_in_usd_display.admin_order_field = 'total_donations_in_usd'
-    total_donations_in_usd_display.short_description = 'Total Donations Received (USD)'
+
+    total_donations_in_usd_display.admin_order_field = "total_donations_in_usd"
+    total_donations_in_usd_display.short_description = "Total Donations Received (USD)"
 
     def total_donations_out_usd_display(self, obj):
         return "${:,.2f}".format(obj.total_donations_out_usd)
-    total_donations_out_usd_display.admin_order_field = 'total_donations_out_usd'
-    total_donations_out_usd_display.short_description = 'Total Donations Sent (USD)'
+
+    total_donations_out_usd_display.admin_order_field = "total_donations_out_usd"
+    total_donations_out_usd_display.short_description = "Total Donations Sent (USD)"
 
     def total_matching_pool_allocations_usd_display(self, obj):
         return "${:,.2f}".format(obj.total_matching_pool_allocations_usd)
-    total_matching_pool_allocations_usd_display.admin_order_field = 'total_matching_pool_allocations_usd'
-    total_matching_pool_allocations_usd_display.short_description = 'Total Matching Pool Allocations (USD)'
+
+    total_matching_pool_allocations_usd_display.admin_order_field = (
+        "total_matching_pool_allocations_usd"
+    )
+    total_matching_pool_allocations_usd_display.short_description = (
+        "Total Matching Pool Allocations (USD)"
+    )
+
+    def has_add_permission(self, request):
+        return False
+
+    def has_change_permission(self, request, obj=None):
+        return False
+
+    def has_delete_permission(self, request, obj=None):
+        return False
diff --git a/activities/admin.py b/activities/admin.py
index cab9f72..219e369 100644
--- a/activities/admin.py
+++ b/activities/admin.py
@@ -1,30 +1,52 @@
 from django.contrib import admin
 from django.utils.html import format_html
-from .models import Activity, Account
+
+from .models import Account, Activity
+
 
 @admin.register(Activity)
 class ActivityAdmin(admin.ModelAdmin):
-    list_display = ('id', 'signer_address', 'receiver_address', 'timestamp', 'type', 'transaction_link', 'action_result')
-    list_filter = ('timestamp', 'type', 'signer', 'receiver')
-    search_fields = ('signer__id', 'receiver__id', 'tx_hash')
-    date_hierarchy = 'timestamp'
-    ordering = ('-timestamp',)
+    list_display = (
+        "id",
+        "signer_address",
+        "receiver_address",
+        "timestamp",
+        "type",
+        "transaction_link",
+        "action_result",
+    )
+    list_filter = ("timestamp", "type", "signer", "receiver")
+    search_fields = ("signer__id", "receiver__id", "tx_hash")
+    date_hierarchy = "timestamp"
+    ordering = ("-timestamp",)
 
     def signer_address(self, obj):
         return obj.signer.id
-    signer_address.admin_order_field = 'signer'
-    signer_address.short_description = 'Signer Address'
+
+    signer_address.admin_order_field = "signer"
+    signer_address.short_description = "Signer Address"
 
     def receiver_address(self, obj):
         return obj.receiver.id
-    receiver_address.admin_order_field = 'receiver'
-    receiver_address.short_description = 'Receiver Address'
+
+    receiver_address.admin_order_field = "receiver"
+    receiver_address.short_description = "Receiver Address"
 
     def transaction_link(self, obj):
         url = f"https://nearblocks.io?query={obj.tx_hash}"
         return format_html('<a href="{}" target="_blank">{}</a>', url, obj.tx_hash)
-    transaction_link.short_description = 'Transaction Hash'  # Sets the column header
+
+    transaction_link.short_description = "Transaction Hash"  # Sets the column header
 
     # def action_result_summary(self, obj):
     #     return "Has Result" if obj.action_result else "No Result"
     # action_result_summary.short_description = 'Action Result'
+
+    def has_add_permission(self, request):
+        return False
+
+    def has_change_permission(self, request, obj=None):
+        return False
+
+    def has_delete_permission(self, request, obj=None):
+        return False
diff --git a/activities/migrations/0002_alter_activity_options.py b/activities/migrations/0002_alter_activity_options.py
new file mode 100644
index 0000000..12fd19c
--- /dev/null
+++ b/activities/migrations/0002_alter_activity_options.py
@@ -0,0 +1,17 @@
+# Generated by Django 5.0.4 on 2024-05-03 10:29
+
+from django.db import migrations
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ("activities", "0001_initial"),
+    ]
+
+    operations = [
+        migrations.AlterModelOptions(
+            name="activity",
+            options={"verbose_name_plural": "Activities"},
+        ),
+    ]
diff --git a/activities/migrations/0003_alter_activity_unique_together.py b/activities/migrations/0003_alter_activity_unique_together.py
new file mode 100644
index 0000000..e68f410
--- /dev/null
+++ b/activities/migrations/0003_alter_activity_unique_together.py
@@ -0,0 +1,17 @@
+# Generated by Django 5.0.4 on 2024-05-06 13:39
+
+from django.db import migrations
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ("activities", "0002_alter_activity_options"),
+    ]
+
+    operations = [
+        migrations.AlterUniqueTogether(
+            name="activity",
+            unique_together={("action_result", "type")},
+        ),
+    ]
diff --git a/activities/models.py b/activities/models.py
index 62e31e8..b1bab6d 100644
--- a/activities/models.py
+++ b/activities/models.py
@@ -66,3 +66,5 @@ class Activity(models.Model):
 
     class Meta:
         verbose_name_plural = "Activities"
+
+        unique_together = (("action_result", "type"),)
diff --git a/appspec-dev.yml b/appspec-dev.yml
new file mode 100644
index 0000000..348d01b
--- /dev/null
+++ b/appspec-dev.yml
@@ -0,0 +1,19 @@
+version: 0.0
+os: linux
+files:
+  - source: /
+    destination: /home/ec2-user/django-indexer-dev
+hooks:
+  # BeforeInstall:
+  #   - location: scripts/clean_destination_dev.sh
+  #     timeout: 300
+  #     runas: ec2-user
+  AfterInstall:
+    - location: scripts/after_install_dev.sh
+      timeout: 300
+      runas: ec2-user
+# ApplicationStart:
+#   - location: scripts/application_start.sh
+#     timeout: 300
+#     runas: root
+# # ValidateService:
diff --git a/appspec-testnet.yml b/appspec-testnet.yml
new file mode 100644
index 0000000..dafa210
--- /dev/null
+++ b/appspec-testnet.yml
@@ -0,0 +1,20 @@
+version: 0.0
+os: linux
+files:
+  - source: /
+    destination: /home/ec2-user/django-indexer-testnet
+hooks:
+  # # Install:
+  # BeforeInstall:
+  #   - location: scripts/clean_destination_testnet.sh
+  #     timeout: 300
+  #     runas: ec2-user
+  AfterInstall:
+    - location: scripts/after_install_testnet.sh
+      timeout: 300
+      runas: ec2-user
+# ApplicationStart:
+#   - location: scripts/application_start.sh
+#     timeout: 300
+#     runas: root
+# # ValidateService:
diff --git a/appspec.yml b/appspec.yml
new file mode 100644
index 0000000..b6fb084
--- /dev/null
+++ b/appspec.yml
@@ -0,0 +1,16 @@
+version: 0.0
+os: linux
+files:
+  - source: /
+    destination: /home/ec2-user/django-indexer
+hooks:
+  # # Install:
+  AfterInstall:
+    - location: scripts/after_install.sh
+      timeout: 300
+      runas: ec2-user
+# ApplicationStart:
+#   - location: scripts/application_start.sh
+#     timeout: 300
+#     runas: root
+# # ValidateService:
diff --git a/base/celery.py b/base/celery.py
index ad77bb6..1aa4d8f 100644
--- a/base/celery.py
+++ b/base/celery.py
@@ -1,15 +1,16 @@
 import os
-
-from django.conf import settings
+import ssl
 
 from celery import Celery
-import ssl
+from celery.schedules import crontab
+from django.conf import settings
 
 os.environ.setdefault("DJANGO_SETTINGS_MODULE", "base.settings")
 
-app = Celery("indexer",
-# broker=settings.CELERY_BROKER_URL,
-# backend=settings.CELERY_RESULT_BACKEND
+app = Celery(
+    "indexer",
+    # broker=settings.CELERY_BROKER_URL,
+    # backend=settings.CELERY_RESULT_BACKEND
 )
 
 # SSL configurations for broker and backend
@@ -22,3 +23,21 @@
 app.config_from_object("django.conf:settings", namespace="CELERY")
 
 app.autodiscover_tasks()
+
+app.conf.beat_schedule = {
+    "update_account_statistics_every_5_minutes": {
+        "task": "indexer_app.tasks.update_account_statistics",
+        "schedule": crontab(minute="*/5"),  # Executes every 5 minutes
+        "options": {"queue": "beat_tasks"},
+    },
+    "fetch_usd_prices_every_5_minutes": {
+        "task": "indexer_app.tasks.fetch_usd_prices",
+        "schedule": crontab(minute="*/5"),  # Executes every 5 minutes
+        "options": {"queue": "beat_tasks"},
+    },
+}
+
+app.conf.task_routes = {
+    "indexer_app.tasks.update_account_statistics": {"queue": "beat_tasks"},
+    "indexer_app.tasks.fetch_usd_prices": {"queue": "beat_tasks"},
+}
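Both periodic jobs above are pinned to a dedicated `beat_tasks` queue via `task_routes`, so they only run if a worker consumes that queue (e.g. `celery -A base worker -Q beat_tasks` running alongside `celery -A base beat` — exact invocation assumed). A minimal sketch of triggering the same tasks ad hoc, with names taken from the schedule above:

```python
# Enqueue the scheduled jobs manually, onto the same queue that
# app.conf.task_routes assigns them to.
from indexer_app.tasks import fetch_usd_prices, update_account_statistics

update_account_statistics.apply_async(queue="beat_tasks")
fetch_usd_prices.apply_async(queue="beat_tasks")
```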
diff --git a/base/logging.py b/base/logging.py
index cda9ae6..12bef01 100644
--- a/base/logging.py
+++ b/base/logging.py
@@ -1,3 +1,4 @@
 import logging
 
-logger = logging.getLogger("django")
\ No newline at end of file
+logger = logging.getLogger("django")
+jobs_logger = logging.getLogger("jobs")
diff --git a/base/settings.py b/base/settings.py
index 37a0b34..5125bbf 100644
--- a/base/settings.py
+++ b/base/settings.py
@@ -30,13 +30,19 @@
 # TODO: update before prod release
 SECRET_KEY = "django-insecure-=r_v_es6w6rxv42^#kc2hca6p%=fe_*cog_5!t%19zea!enlju"
 
-ALLOWED_HOSTS = ["ec2-52-23-183-168.compute-1.amazonaws.com", "127.0.0.1"]
+ALLOWED_HOSTS = [
+    "ec2-100-27-57-47.compute-1.amazonaws.com",
+    "127.0.0.1",
+    "dev.potlock.io",
+    "test-dev.potlock.io",
+]
 
 # Env vars
 AWS_ACCESS_KEY_ID = os.environ.get("PL_AWS_ACCESS_KEY_ID")
 AWS_SECRET_ACCESS_KEY = os.environ.get("PL_AWS_SECRET_ACCESS_KEY")
 # CACHALOT_ENABLED = strtobool(os.environ.get("PL_CACHALOT_ENABLED", "False"))
 # CACHALOT_TIMEOUT = os.environ.get("PL_CACHALOT_TIMEOUT")
+COINGECKO_API_KEY = os.environ.get("PL_COINGECKO_API_KEY")
 DEBUG = strtobool(os.environ.get("PL_DEBUG", "False"))
 ENVIRONMENT = os.environ.get("PL_ENVIRONMENT", "local")
 LOG_LEVEL = os.getenv("PL_LOG_LEVEL", "INFO").upper()
@@ -51,8 +57,18 @@
 REDIS_PORT = os.environ.get("PL_REDIS_PORT", 6379)
 SENTRY_DSN = os.environ.get("PL_SENTRY_DSN")
 
+POTLOCK_TLA = "potlock.testnet" if ENVIRONMENT == "testnet" else "potlock.near"
+
 BLOCK_SAVE_HEIGHT = os.environ.get("BLOCK_SAVE_HEIGHT")
 
+COINGECKO_URL = (
+    "https://pro-api.coingecko.com/api/v3"
+    if COINGECKO_API_KEY
+    else "https://api.coingecko.com/api/v3"
+)
+# Number of hours around a given timestamp for querying historical prices
+HISTORICAL_PRICE_QUERY_HOURS = 24
+
 # Application definition
 
 INSTALLED_APPS = [
@@ -63,6 +79,7 @@
     "django.contrib.messages",
     "django.contrib.staticfiles",
     "rest_framework",
+    "corsheaders",
     # "cachalot",
     "celery",
     "api",
@@ -80,10 +97,19 @@
 REST_FRAMEWORK = {
     "DEFAULT_PAGINATION_CLASS": "rest_framework.pagination.LimitOffsetPagination",
     "PAGE_SIZE": DEFAULT_PAGE_SIZE,
+    "DEFAULT_THROTTLE_CLASSES": [
+        # "rest_framework.throttling.UserRateThrottle",
+        "rest_framework.throttling.AnonRateThrottle",
+    ],
+    "DEFAULT_THROTTLE_RATES": {
+        # "user": "100/day",
+        "anon": "100/minute",
+    },
 }
 
 
 MIDDLEWARE = [
+    "corsheaders.middleware.CorsMiddleware",
     "django.middleware.security.SecurityMiddleware",
     "django.contrib.sessions.middleware.SessionMiddleware",
     "django.middleware.common.CommonMiddleware",
@@ -113,6 +139,10 @@
 
 WSGI_APPLICATION = "base.wsgi.application"
 
+CORS_ALLOWED_ORIGINS = [
+    "http://localhost:3000",
+]
+
 # REDIS / CACHE CONFIGS
 
 REDIS_SCHEMA = (
@@ -124,9 +154,13 @@
 # Append SSL parameters as query parameters in the URL
 SSL_QUERY = "?ssl_cert_reqs=CERT_NONE"  # TODO: UPDATE ACCORDING TO ENV (prod should require cert)
 
-CELERY_BROKER_URL = f"{REDIS_BASE_URL}/0{SSL_QUERY}"
+CELERY_BROKER_URL = f"{REDIS_BASE_URL}/0"
+CELERY_RESULT_BACKEND = f"{REDIS_BASE_URL}/1"
 
-CELERY_RESULT_BACKEND = f"{REDIS_BASE_URL}/1{SSL_QUERY}"
+
+if ENVIRONMENT != "local":
+    CELERY_BROKER_URL += SSL_QUERY
+    CELERY_RESULT_BACKEND += SSL_QUERY
 
 
 CELERY_BROKER_TRANSPORT_OPTIONS = {
@@ -190,24 +224,25 @@
 # LOGGING
 
 # Setting the log level from an environment variable
-LOG_LEVEL = os.getenv('LOG_LEVEL', 'INFO').upper()
+LOG_LEVEL = os.getenv("LOG_LEVEL", "INFO").upper()
 log_level = getattr(logging, LOG_LEVEL, logging.INFO)
 # print("LOG_LEVEL: ", LOG_LEVEL)
 
+# Set log group name based on environment
+log_group_name = f"django-indexer-{ENVIRONMENT}"
+
 # Setting up the logging configuration
 LOGGING = {
     "version": 1,
     "disable_existing_loggers": False,
     "formatters": {
-        "standard": {
-            "format": "%(asctime)s [%(levelname)s] %(name)s: %(message)s"
-        },
+        "standard": {"format": "%(asctime)s [%(levelname)s] %(name)s: %(message)s"},
     },
     "handlers": {
         "console": {
             "level": log_level,
             "class": "logging.StreamHandler",
-            "formatter": "standard"
+            "formatter": "standard",
         },
     },
     "loggers": {
@@ -221,11 +256,13 @@
             "level": log_level,
             "propagate": False,
         },
-        "": {  # root logger
+        "jobs": {
             "handlers": ["console"],
-            "level": log_level
-        }
-    }
+            "level": log_level,
+            "propagate": False,
+        },
+        "": {"handlers": ["console"], "level": log_level},  # root logger
+    },
 }
 
 # Adding Watchtower logging handler for non-local environments
@@ -235,72 +272,17 @@
     LOGGING["handlers"]["watchtower"] = {
         "class": "watchtower.CloudWatchLogHandler",
         "boto3_client": boto3_logs_client,
-        "log_group_name": "django-indexer",
+        "log_group_name": log_group_name,
         "formatter": "standard",
         "level": log_level,
     }
     LOGGING["loggers"][""]["handlers"].append("watchtower")
     LOGGING["loggers"]["django"]["handlers"].append("watchtower")
     LOGGING["loggers"]["indexer"]["handlers"].append("watchtower")
+    LOGGING["loggers"]["jobs"]["handlers"].append("watchtower")
 
-# log_level = getattr(logging, LOG_LEVEL, logging.INFO)
-# print("LOG_LEVEL: ", LOG_LEVEL)
-# # print("log_level: ", log_level)
-
-# if ENVIRONMENT != "local":
-#     AWS_REGION_NAME = "us-east-1"
-#     boto3_logs_client = boto3.client("logs", region_name=AWS_REGION_NAME)
-
-
-# LOGGING = {
-#     "version": 1,
-#     "disable_existing_loggers": False,
-#     "root": {
-#         "level": log_level,
-#         # Adding the watchtower handler here causes all loggers in the project that
-#         # have propagate=True (the default) to send messages to watchtower. If you
-#         # wish to send only from specific loggers instead, remove "watchtower" here
-#         # and configure individual loggers below.
-#         # "handlers": ["watchtower", "console"],
-#         "handlers": ["console"],
-#     },
-#     "handlers": {
-#         "console": {
-#             "class": "logging.StreamHandler",
-#         },
-#         # "watchtower": {
-#         #     "class": "watchtower.CloudWatchLogHandler",
-#         #     "boto3_client": boto3_logs_client,
-#         #     "log_group_name": "django-indexer",
-#         #     # Decrease the verbosity level here to send only those logs to watchtower,
-#         #     # but still see more verbose logs in the console. See the watchtower
-#         #     # documentation for other parameters that can be set here.
-#         #     "level": log_level,
-#         # },
-#     },
-#     "loggers": {
-#         # In the debug server (`manage.py runserver`), several Django system loggers cause
-#         # deadlocks when using threading in the logging handler, and are not supported by
-#         # watchtower. This limitation does not apply when running on production WSGI servers
-#         # (gunicorn, uwsgi, etc.), so we recommend that you set `propagate=True` below in your
-#         # production-specific Django settings file to receive Django system logs in CloudWatch.
-#         "django": {"level": log_level, "handlers": ["console"], "propagate": False}
-#         # Add any other logger-specific configuration here.
-#     },
-# }
-
-# if ENVIRONMENT != "local":
-#     LOGGING["handlers"]["watchtower"] = {
-#         "class": "watchtower.CloudWatchLogHandler",
-#         "boto3_client": boto3_logs_client,
-#         "log_group_name": "django-indexer",
-#         # Decrease the verbosity level here to send only those logs to watchtower,
-#         # but still see more verbose logs in the console. See the watchtower
-#         # documentation for other parameters that can be set here.
-#         "level": log_level,
-#     }
-
-#     LOGGING["root"]["handlers"].append("watchtower")
+
+## SENTRY CONFIG
 
 sentry_sdk.init(
     environment=ENVIRONMENT,
diff --git a/base/utils.py b/base/utils.py
index 64bfb03..4c443a1 100644
--- a/base/utils.py
+++ b/base/utils.py
@@ -12,14 +12,15 @@ def format_to_near(yocto_amount: str):
     near_amount = int(yocto_amount) / (10**24)
     return near_amount
 
+
 def convert_ns_to_utc(ns_timestamp):
     # Convert nanoseconds to seconds (float)
     seconds = ns_timestamp / 1e9
-    
+
     # Create a datetime object from the seconds (UTC)
     utc_datetime = datetime.utcfromtimestamp(seconds)
-    
+
     # Format the datetime object as a string
-    formatted_date = utc_datetime.strftime('%Y-%m-%d %H:%M:%S')
-    
-    return formatted_date
\ No newline at end of file
+    formatted_date = utc_datetime.strftime("%Y-%m-%d %H:%M:%S")
+
+    return formatted_date
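A quick usage sketch for the two helpers above (sample values are illustrative): NEAR token amounts are expressed on-chain as yoctoNEAR strings, and lake-framework block timestamps arrive in nanoseconds.

```python
from base.utils import convert_ns_to_utc, format_to_near

# 1 NEAR = 10**24 yoctoNEAR
print(format_to_near("2500000000000000000000000"))  # 2.5

# Block timestamps are nanoseconds since the epoch
print(convert_ns_to_utc(1714732200000000000))  # "2024-05-03 10:30:00"
```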
diff --git a/donations/admin.py b/donations/admin.py
index da83b44..f27994d 100644
--- a/donations/admin.py
+++ b/donations/admin.py
@@ -1,20 +1,43 @@
 from django.contrib import admin
 from django.utils.dateformat import format
 from django.utils.timezone import localtime
+
 from .models import Donation
 
+
+@admin.register(Donation)
 class DonationAdmin(admin.ModelAdmin):
-    list_display = [field.name for field in Donation._meta.get_fields() if field.name != 'id']
-    list_display.extend(['donor_address', 'recipient_address', 'ft_address', 'referrer_address', 'chef_address'])  # Add custom methods for addresses
-    search_fields = ('message', 'donor__address')  # You can add more fields here
-    list_filter = ('donated_at', 'donor', 'pot')  # Added default filters, you can add custom DateRangeFilter
-    date_hierarchy = 'donated_at'
-    ordering = ('-donated_at',)
+    list_display = [field.name for field in Donation._meta.get_fields()]
+    list_display.extend(
+        [
+            "donor_address",
+            "recipient_address",
+            "ft_address",
+            "referrer_address",
+            "chef_address",
+        ]
+    )  # Add custom methods for addresses
+    search_fields = (
+        "message",
+        "donor__id",
+    )  # Account records are keyed by address, so search by donor__id
+    list_filter = ("donated_at", "donor", "pot")
+    date_hierarchy = "donated_at"
+    ordering = ("-donated_at",)
+
+    def get_queryset(self, request):
+        # Prefetch related donor, recipient, ft, referrer, and chef to prevent N+1 queries
+        return (
+            super()
+            .get_queryset(request)
+            .prefetch_related("donor", "recipient", "ft", "referrer", "chef")
+        )
 
     def donor_address(self, obj):
         return obj.donor.id
-    donor_address.admin_order_field = 'donor__address'  # Allows column order sorting
-    donor_address.short_description = 'Donor Address'
+
+    donor_address.admin_order_field = "donor__id"
+    donor_address.short_description = "Donor Address"
 
     def recipient_address(self, obj):
         return obj.recipient.id if obj.recipient else None
@@ -29,10 +52,19 @@
     def chef_address(self, obj):
         return obj.chef.id if obj.chef else None
 
     def formfield_for_dbfield(self, db_field, request, **kwargs):
-        field = super(DonationAdmin, self).formfield_for_dbfield(db_field, request, **kwargs)
-        if db_field.name in ['donated_at']:  # Add more fields if needed
-            field.widget.format = '%d-%m-%Y %H:%M'  # Change to your preferred format
-            field.widget.attrs.update({'class': 'vDateField', 'size': '20'})
+        field = super(DonationAdmin, self).formfield_for_dbfield(
+            db_field, request, **kwargs
+        )
+        if db_field.name == "donated_at":
+            field.widget.format = "%d-%m-%Y %H:%M"
+            field.widget.attrs.update({"class": "vDateField", "size": "20"})
         return field
 
-admin.site.register(Donation, DonationAdmin)
+    def has_add_permission(self, request):
+        return False
+
+    def has_change_permission(self, request, obj=None):
+        return False
+
+    def has_delete_permission(self, request, obj=None):
+        return False
diff --git a/donations/migrations/0005_remove_donation_unique_pot_on_chain_id_and_more.py b/donations/migrations/0005_remove_donation_unique_pot_on_chain_id_and_more.py
new file mode 100644
index 0000000..67e0e0b
--- /dev/null
+++ b/donations/migrations/0005_remove_donation_unique_pot_on_chain_id_and_more.py
@@ -0,0 +1,35 @@
+# Generated by Django 5.0.4 on 2024-05-03 10:29
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ("accounts", "0001_initial"),
+        ("donations", "0004_alter_donation_on_chain_id"),
+        ("pots", "0001_initial"),
+    ]
+
+    operations = [
+        migrations.RemoveConstraint(
+            model_name="donation",
+            name="unique_pot_on_chain_id",
+        ),
+        migrations.AddConstraint(
+            model_name="donation",
+            constraint=models.UniqueConstraint(
+                condition=models.Q(("pot__isnull", True)),
+                fields=("on_chain_id",),
+                name="unique_on_chain_id_when_pot_is_null",
+            ),
+        ),
+        migrations.AddConstraint(
+            model_name="donation",
+            constraint=models.UniqueConstraint(
+                condition=models.Q(("pot__isnull", False)),
+                fields=("on_chain_id", "pot"),
+                name="unique_on_chain_id_with_pot",
+            ),
+        ),
+    ]
diff --git a/donations/migrations/0006_alter_donation_chef_fee.py b/donations/migrations/0006_alter_donation_chef_fee.py
new file mode 100644
index 0000000..c22db14
--- /dev/null
+++ b/donations/migrations/0006_alter_donation_chef_fee.py
@@ -0,0 +1,20 @@
+# Generated by Django 5.0.4 on 2024-05-07 18:50
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ("donations", "0005_remove_donation_unique_pot_on_chain_id_and_more"),
+    ]
+
+    operations = [
+        migrations.AlterField(
+            model_name="donation",
+            name="chef_fee",
+            field=models.CharField(
+                help_text="Chef fee.", max_length=64, null=True, verbose_name="chef fee"
+            ),
+        ),
+    ]
diff --git a/donations/migrations/0007_auto_20240507_1857.py b/donations/migrations/0007_auto_20240507_1857.py
new file mode 100644
index 0000000..1052a55
--- /dev/null
+++ b/donations/migrations/0007_auto_20240507_1857.py
@@ -0,0 +1,19 @@
+# Generated by Django 5.0.4 on 2024-05-07 18:57
+
+from django.db import migrations
+
+from ..models import Donation
+
+
+class Migration(migrations.Migration):
+
+    def set_chef_fee_none(apps, schema_editor):
+        Donation.objects.update(chef_fee=None)
+
+    dependencies = [
+        ("donations", "0006_alter_donation_chef_fee"),
+    ]
+
+    operations = [
+        migrations.RunPython(set_chef_fee_none),
+    ]
diff --git a/donations/models.py b/donations/models.py
index 8c932e1..b101250 100644
--- a/donations/models.py
+++ b/donations/models.py
@@ -1,9 +1,17 @@
-from django import db
+from datetime import timedelta
+from decimal import Decimal
+
+import requests
+from asgiref.sync import sync_to_async
+from django.conf import settings
 from django.db import models
 from django.utils.translation import gettext_lazy as _
 
 from accounts.models import Account
+from base.logging import logger
+from base.utils import format_date
 from pots.models import Pot
+from tokens.models import Token, TokenHistoricalPrice
 
 
 class Donation(models.Model):
@@ -54,7 +62,7 @@ class Donation(models.Model):
         help_text=_("Net amount in USD."),
     )
     ft = models.ForeignKey(
-        Account,
+        Account,  # should probably be Token
         on_delete=models.CASCADE,
         related_name="ft_donations",
         null=False,
@@ -92,6 +100,7 @@ class Donation(models.Model):
         related_name="received_donations",
         null=True,
         help_text=_("Donation recipient."),
+        db_index=True,
     )
     protocol_fee = models.CharField(
         _("protocol fee"),
@@ -136,7 +145,7 @@ class Donation(models.Model):
     chef_fee = models.CharField(
         _("chef fee"),
         max_length=64,
-        null=False,
+        null=True,
         help_text=_("Chef fee."),
     )
     chef_fee_usd = models.DecimalField(
@@ -156,6 +165,123 @@ class Donation(models.Model):
 
     class Meta:
         constraints = [
             models.UniqueConstraint(
-                fields=["on_chain_id", "pot"], name="unique_pot_on_chain_id"
-            )
+                fields=["on_chain_id"],
+                condition=models.Q(pot__isnull=True),
+                name="unique_on_chain_id_when_pot_is_null",
+            ),
+            models.UniqueConstraint(
+                fields=["on_chain_id", "pot"],
+                condition=models.Q(pot__isnull=False),
+                name="unique_on_chain_id_with_pot",
+            ),
         ]
+
+    def get_ft_token(self):
+        token, created = Token.objects.get_or_create(
+            id=self.ft,
+            defaults={"decimals": 12},  # Default values for new Token creation
+        )
+        if created:
+            # TODO: fetch token metadata and add correct decimals, possibly other metadata
+            pass
+        return token
+
+    async def fetch_usd_prices_async(self):
+        fetch_prices = sync_to_async(self.fetch_usd_prices)
+        await fetch_prices()
+
+    ### Fetches USD prices for the Donation record and saves USD totals
+    def fetch_usd_prices(self):
+        # get existing values for stats adjustments later
+        existing_total_amount_usd = self.total_amount_usd
+        existing_net_amount_usd = self.net_amount_usd
+        existing_protocol_fee_usd = self.protocol_fee_usd
+        existing_referrer_fee_usd = self.referrer_fee_usd
+        existing_chef_fee_usd = self.chef_fee_usd
+        # first, see if there is a TokenHistoricalPrice within 1 day (or HISTORICAL_PRICE_QUERY_HOURS) of self.donated_at
+        token = self.get_ft_token()
+        time_window = timedelta(hours=settings.HISTORICAL_PRICE_QUERY_HOURS or 24)
+        token_prices = TokenHistoricalPrice.objects.filter(
+            token=token,
+            timestamp__gte=self.donated_at - time_window,
+            timestamp__lte=self.donated_at + time_window,
+        )
+        existing_token_price = token_prices.first()
+        total_amount = token.format_price(self.total_amount)
+        net_amount = token.format_price(self.net_amount)
+        protocol_amount = token.format_price(self.protocol_fee)
+        referrer_amount = (
+            None if not self.referrer_fee else token.format_price(self.referrer_fee)
+        )
+        chef_amount = None if not self.chef_fee else token.format_price(self.chef_fee)
+        # chef_amount = token.format_price(self.chef_fee or "0")
+        if existing_token_price:
+            try:
+                price_usd = existing_token_price.price_usd
+                self.total_amount_usd = total_amount * price_usd
+                self.net_amount_usd = net_amount * price_usd
+                self.protocol_fee_usd = protocol_amount * price_usd
+                self.referrer_fee_usd = (
+                    None if not referrer_amount else referrer_amount * price_usd
+                )
+                self.chef_fee_usd = None if not chef_amount else chef_amount * price_usd
+                self.save()
+                logger.info(
+                    "USD prices calculated and saved using existing TokenHistoricalPrice"
+                )
+            except Exception as e:
+                logger.error(
+                    f"Failed to calculate and save USD prices using existing TokenHistoricalPrice: {e}"
+                )
+            # TODO: update totals for relevant accounts
+        else:
+            # no existing price within acceptable time period; fetch from coingecko
+            price_data = {}  # initialize so the lookup below can't raise NameError if the request fails
+            try:
+                logger.info(
+                    "No existing price within acceptable time period; fetching historical price..."
+                )
+                endpoint = f"{settings.COINGECKO_URL}/coins/{self.ft.id}/history?date={format_date(self.donated_at)}&localization=false"
+                if settings.COINGECKO_API_KEY:
+                    endpoint += f"&x_cg_pro_api_key={settings.COINGECKO_API_KEY}"
+                logger.info(f"coingecko endpoint: {endpoint}")
+                response = requests.get(endpoint)
+                logger.info(f"coingecko response: {response}")
+                if response.status_code == 429:
+                    logger.warning("Coingecko rate limit exceeded")
+                price_data = response.json()
+            except Exception as e:
+                logger.warning(f"Failed to fetch coingecko price data: {e}")
+            logger.info(f"coingecko price data: {price_data}")
+            price_usd = (
+                price_data.get("market_data", {}).get("current_price", {}).get("usd")
+            )
+            logger.info(f"unit price: {price_usd}")
+            if price_usd:
+                try:
+                    # convert price_usd to decimal
+                    price_usd = Decimal(price_usd)
+                    self.total_amount_usd = total_amount * price_usd
+                    self.net_amount_usd = net_amount * price_usd
+                    self.protocol_fee_usd = protocol_amount * price_usd
+                    self.referrer_fee_usd = (
+                        None if not referrer_amount else referrer_amount * price_usd
+                    )
+                    self.chef_fee_usd = (
+                        None if not chef_amount else chef_amount * price_usd
+                    )
+                    self.save()
+                except Exception as e:
+                    logger.error(
+                        f"Failed to calculate and save USD prices using fetched price: {e}"
+                    )
+                # TODO: update totals for relevant accounts
+                try:
+                    TokenHistoricalPrice.objects.create(
+                        token=token,
+                        price_usd=price_usd,
+                        timestamp=self.donated_at,
+                    )
+                except Exception as e:
+                    logger.warning(
+                        f"Error creating TokenHistoricalPrice: {e} token: {token} price_usd: {price_usd}"
+                    )
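`fetch_usd_prices` above imports a `format_date` helper from `base.utils` that this diff doesn't show. CoinGecko's `/coins/{id}/history` endpoint expects its `date` parameter as `dd-mm-yyyy`, so the helper presumably reduces to something like this sketch (only the name and import location come from the diff; the body is an assumption):

```python
from datetime import datetime


def format_date(dt: datetime) -> str:
    # CoinGecko /coins/{id}/history expects dd-mm-yyyy
    return dt.strftime("%d-%m-%Y")
```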
+ ) + endpoint = f"{settings.COINGECKO_URL}/coins/{self.ft.id}/history?date={format_date(self.donated_at)}&localization=false" + if settings.COINGECKO_API_KEY: + endpoint += f"&x_cg_pro_api_key={settings.COINGECKO_API_KEY}" + logger.info(f"coingecko endpoint: {endpoint}") + response = requests.get(endpoint) + logger.info(f"coingecko response: {response}") + if response.status_code == 429: + logger.warning("Coingecko rate limit exceeded") + price_data = response.json() + except Exception as e: + logger.warning(f"Failed to fetch coingecko price data: {e}") + logger.info(f"coingecko price data: {price_data}") + price_usd = ( + price_data.get("market_data", {}).get("current_price", {}).get("usd") + ) + logger.info(f"unit price: {price_usd}") + if price_usd: + try: + # convert price_usd to decimal + price_usd = Decimal(price_usd) + self.total_amount_usd = total_amount * price_usd + self.net_amount_usd = net_amount * price_usd + self.protocol_fee_usd = protocol_amount * price_usd + self.referrer_fee_usd = ( + None if not referrer_amount else referrer_amount * price_usd + ) + self.chef_fee_usd = ( + None if not chef_amount else chef_amount * price_usd + ) + self.save() + except Exception as e: + logger.error( + f"Failed to calculate and save USD prices using fetched price: {e}" + ) + # TODO: update totals for relevant accounts + try: + TokenHistoricalPrice.objects.create( + token=token, + price_usd=price_usd, + timestamp=self.donated_at, + ) + except Exception as e: + logger.warning( + f"Error creating TokenHistoricalPrice: {e} token: {token} price_usd: {price_usd}" + ) diff --git a/indexer_app/admin.py b/indexer_app/admin.py index 3b56656..e94e67a 100644 --- a/indexer_app/admin.py +++ b/indexer_app/admin.py @@ -1,7 +1,18 @@ from django.contrib import admin + from .models import BlockHeight + @admin.register(BlockHeight) class BlockHeightAdmin(admin.ModelAdmin): - list_display = ('id', 'block_height', 'updated_at') - ordering = ('-updated_at',) + list_display = ("id", "block_height", "updated_at") + ordering = ("-updated_at",) + + def has_add_permission(self, request): + return False + + def has_change_permission(self, request, obj=None): + return False + + def has_delete_permission(self, request, obj=None): + return False diff --git a/indexer_app/handler.py b/indexer_app/handler.py index 6fbf24a..1981fb3 100644 --- a/indexer_app/handler.py +++ b/indexer_app/handler.py @@ -2,11 +2,16 @@ import json from datetime import datetime +from django.conf import settings from django.core.cache import cache from near_lake_framework import near_primitives +from base.utils import convert_ns_to_utc +from pots.utils import match_pot_factory_pattern, match_pot_subaccount_pattern + from .logging import logger from .utils import ( + handle_batch_donations, handle_default_list_status_change, handle_list_admin_removal, handle_list_registration_update, @@ -17,13 +22,12 @@ handle_new_pot, handle_new_pot_factory, handle_payout_challenge, + handle_payout_challenge_response, handle_pot_application, handle_pot_application_status_change, handle_set_payouts, handle_transfer_payout, ) -from base.utils import convert_ns_to_utc -from pots.utils import match_pot_factory_version_pattern async def handle_streamer_message(streamer_message: near_primitives.StreamerMessage): @@ -33,7 +37,9 @@ async def handle_streamer_message(streamer_message: near_primitives.StreamerMess "block_height", block_height ) # TODO: add custom timeout if it should be valid for longer than default (5 minutes) formatted_date = 
convert_ns_to_utc(block_timestamp) - logger.info(f"Block Height: {block_height}, Block Timestamp: {block_timestamp} ({formatted_date})") + logger.info( + f"Block Height: {block_height}, Block Timestamp: {block_timestamp} ({formatted_date})" + ) # if block_height == 111867204: # with open("indexer_outcome2.json", "w") as file: # file.write(f"{streamer_message}") @@ -41,8 +47,9 @@ async def handle_streamer_message(streamer_message: near_primitives.StreamerMess for shard in streamer_message.shards: for receipt_execution_outcome in shard.receipt_execution_outcomes: # we only want to proceed if it's a potlock tx and it succeeded.... (unreadable if statement?) + lists_contract = "lists." + settings.POTLOCK_TLA if not receipt_execution_outcome.receipt.receiver_id.endswith( - "potlock.near" + settings.POTLOCK_TLA ) or ( "SuccessReceiptId" not in receipt_execution_outcome.execution_outcome.outcome.status @@ -62,7 +69,7 @@ async def handle_streamer_message(streamer_message: near_primitives.StreamerMess try: parsed_log = json.loads(log[len("EVENT_JSON:") :]) except json.JSONDecodeError: - logging.warning( + logger.warning( f"Receipt ID: `{receipt_execution_outcome.receipt.receipt_id}`\nError during parsing logs from JSON string to dict" ) continue @@ -106,22 +113,28 @@ async def handle_streamer_message(streamer_message: near_primitives.StreamerMess args_dict = json.loads(decoded_text) except UnicodeDecodeError: # Handle case where the byte sequence cannot be decoded to UTF-8 - logger.warning(f"Cannot decode args to UTF-8 text: {decoded_bytes}") + logger.warning( + f"Cannot decode args to UTF-8 text: {decoded_bytes}" + ) args_dict = {} except json.JSONDecodeError: # Handle case where the text cannot be parsed as JSON - logger.warning(f"Decoded text is not valid JSON: {decoded_text}") + logger.warning( + f"Decoded text is not valid JSON: {decoded_text}" + ) args_dict = {} match method_name: case "new": - if match_pot_factory_version_pattern(receipt.receiver_id): + if match_pot_factory_pattern(receipt.receiver_id): logger.info(f"matched for factory pattern: {args_dict}") await handle_new_pot_factory( args_dict, receiver_id, created_at ) - else: - logger.info(f"new pot deployment: {args_dict}, {action}") + elif match_pot_subaccount_pattern(receipt.receiver_id): + logger.info( + f"new pot deployment: {args_dict}, {action}" + ) await handle_new_pot( args_dict, receiver_id, @@ -131,9 +144,11 @@ async def handle_streamer_message(streamer_message: near_primitives.StreamerMess created_at, ) break - + # TODO: update to use handle_apply method?? case "assert_can_apply_callback": - logger.info(f"application case: {args_dict}, {action}, {receipt}") + logger.info( + f"application case: {args_dict}, {action}, {receipt}" + ) await handle_pot_application( args_dict, receiver_id, @@ -145,7 +160,9 @@ async def handle_streamer_message(streamer_message: near_primitives.StreamerMess break case "apply": - logger.info(f"application case 2: {args_dict}, {action}, {receipt}") + logger.info( + f"application case 2: {args_dict}, {action}, {receipt}" + ) await handle_pot_application( args_dict, receiver_id, @@ -157,21 +174,29 @@ async def handle_streamer_message(streamer_message: near_primitives.StreamerMess break case "donate": # TODO: donate that produces result - logger.info(f"switching bazooka to knifee works!! 
donate his blood: {args_dict}, {receipt}, {action}, {log_data}") - await handle_new_donations( - args_dict, - receiver_id, - signer_id, - "direct", - receipt, - status_obj, - log_data, - created_at, + logger.info( + f"switching bazooka to knifee works!! donate his blood: {args_dict}, {receipt}, {action}, {log_data}" ) + if len(log_data) > 1: + await handle_batch_donations( + receiver_id, signer_id, "direct", receipt, log_data + ) + else: + await handle_new_donations( + args_dict, + receiver_id, + signer_id, + "direct", + receipt, + status_obj, + log_data, + ) break case "handle_protocol_fee_callback": - logger.info(f"donations to pool incoming: {args_dict}, {receipt}, {receipt_execution_outcome}") + logger.info( + f"donations to pool incoming: {args_dict}, {receipt}, {receipt_execution_outcome}" + ) await handle_new_donations( args_dict, receiver_id, @@ -180,12 +205,13 @@ async def handle_streamer_message(streamer_message: near_primitives.StreamerMess receipt, status_obj, log_data, - created_at, ) break case "transfer_funds_callback": - logger.info(f"new version donations to pool incoming: {args_dict}, {action}") + logger.info( + f"new version donations to pool incoming: {args_dict}, {action}" + ) await handle_new_donations( args_dict, receiver_id, @@ -194,15 +220,16 @@ async def handle_streamer_message(streamer_message: near_primitives.StreamerMess receipt, status_obj, log_data, - created_at, ) break case ( "register_batch" ): # TODO: listen for create_registration event instead of method call - logger.info(f"registrations incoming: {args_dict}, {action}") - if receiver_id != "lists.potlock.near": + logger.info( + f"registrations incoming: {args_dict}, {action}" + ) + if receiver_id != lists_contract: break await handle_new_list_registration( args_dict, receiver_id, signer_id, receipt, status_obj @@ -210,7 +237,9 @@ async def handle_streamer_message(streamer_message: near_primitives.StreamerMess break case "chef_set_application_status": - logger.info(f"application status change incoming: {args_dict}") + logger.info( + f"application status change incoming: {args_dict}" + ) await handle_pot_application_status_change( args_dict, receiver_id, signer_id, receipt, status_obj ) @@ -244,7 +273,22 @@ async def handle_streamer_message(streamer_message: near_primitives.StreamerMess case "challenge_payouts": logger.info(f"challenge payout: {args_dict}") await handle_payout_challenge( - args_dict, receiver_id, signer_id, receipt.receipt_id + args_dict, + receiver_id, + signer_id, + receipt.receipt_id, + created_at, + ) + break + + case "admin_update_payouts_challenge": + logger.info(f"challenge payout: {args_dict}") + await handle_payout_challenge_response( + args_dict, + receiver_id, + signer_id, + receipt.receipt_id, + created_at, ) break @@ -259,7 +303,7 @@ async def handle_streamer_message(streamer_message: near_primitives.StreamerMess "owner_remove_admins" ): # TODO: use update_admins event instead of method call to handle all cases logger.info(f"attempting to remove admins....: {args_dict}") - if receiver_id != "lists.potlock.near": + if receiver_id != lists_contract: break await handle_list_admin_removal( args_dict, receiver_id, signer_id, receipt.receipt_id @@ -268,14 +312,14 @@ async def handle_streamer_message(streamer_message: near_primitives.StreamerMess case "create_list": logger.info(f"creating list... 
{args_dict}, {action}") - if receiver_id != "lists.potlock.near": + if receiver_id != lists_contract: break await handle_new_list(signer_id, receiver_id, status_obj) break case "upvote": logger.info(f"up voting... {args_dict}") - if receiver_id != "lists.potlock.near": + if receiver_id != lists_contract: break await handle_list_upvote( args_dict, receiver_id, signer_id, receipt.receipt_id @@ -284,8 +328,6 @@ async def handle_streamer_message(streamer_message: near_primitives.StreamerMess # TODO: handle remove upvote except Exception as e: - logger.warning( - f"Error during parsing method call from JSON string to dict\n{e}" - ) + logger.error(f"Error in indexer handler:\n{e}") # with open("indexer_error.txt", "a") as file: # file.write(f"{e}\n") diff --git a/indexer_app/migrations/0002_blockheight_block_timestamp.py b/indexer_app/migrations/0002_blockheight_block_timestamp.py new file mode 100644 index 0000000..deba8d4 --- /dev/null +++ b/indexer_app/migrations/0002_blockheight_block_timestamp.py @@ -0,0 +1,22 @@ +# Generated by Django 5.0.4 on 2024-05-03 10:29 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("indexer_app", "0001_initial"), + ] + + operations = [ + migrations.AddField( + model_name="blockheight", + name="block_timestamp", + field=models.DateTimeField( + help_text="date equivalent of the block height.", + null=True, + verbose_name="block timestamp", + ), + ), + ] diff --git a/indexer_app/models.py b/indexer_app/models.py index f8c0a4b..ace12ec 100644 --- a/indexer_app/models.py +++ b/indexer_app/models.py @@ -12,6 +12,11 @@ class BlockHeight(models.Model): _("blockheight value"), help_text=_("the last blockheight saved to db."), ) + block_timestamp = models.DateTimeField( + _("block timestamp"), + help_text=_("date equivalent of the block height."), + null=True, + ) updated_at = models.DateTimeField( _("updated at"), help_text=_("block height last update at."), diff --git a/indexer_app/tasks.py b/indexer_app/tasks.py index ecf33cc..4d3f179 100644 --- a/indexer_app/tasks.py +++ b/indexer_app/tasks.py @@ -1,37 +1,58 @@ import asyncio +import logging from pathlib import Path +from billiard.exceptions import WorkerLostError from celery import shared_task +from celery.signals import task_revoked, worker_shutdown from django.conf import settings +from django.db import transaction +from django.db.models import Count, Q, Sum from near_lake_framework import LakeConfig, streamer +from accounts.models import Account +from donations.models import Donation from indexer_app.handler import handle_streamer_message +from pots.models import PotPayout from .logging import logger from .utils import cache_block_height, get_block_height -async def indexer(network: str, from_block: int, to_block: int): +async def indexer(from_block: int, to_block: int): """ Runs the lake indexer framework """ # Initialize lake indexer logger.info(f"from block: {from_block}") - lake_config = LakeConfig.mainnet() if network == "mainnet" else LakeConfig.testnet() + lake_config = ( + LakeConfig.testnet() + if settings.ENVIRONMENT == "testnet" + else LakeConfig.mainnet() + ) lake_config.start_block_height = ( - from_block if from_block else logger.info("Starting to index from latest block") + from_block + if from_block + else logger.info( + "Starting to index from latest block" + ) # TODO: wtf is this shitty code ) lake_config.aws_access_key_id = settings.AWS_ACCESS_KEY_ID lake_config.aws_secret_key = settings.AWS_SECRET_ACCESS_KEY _, 
streamer_messages_queue = streamer(lake_config) block_count = 0 - + while True: try: # streamer_message is the current block streamer_message = await streamer_messages_queue.get() block_count += 1 - await cache_block_height("current_block_height", streamer_message.block.header.height, block_count) # current block height + await cache_block_height( + "current_block_height", + streamer_message.block.header.height, + block_count, + streamer_message.block.header.timestamp, + ) # current block height await handle_streamer_message(streamer_message) except Exception as e: logger.error(f"Error in streamer_messages_queue: {e}") @@ -39,22 +60,124 @@ async def indexer(network: str, from_block: int, to_block: int): @shared_task def listen_to_near_events(): - logger.info("Listening to near events...") + logger.info("Listening to NEAR events...") loop = asyncio.new_event_loop() asyncio.set_event_loop(loop) try: # Update below with desired network & block height - start_block = get_block_height('current_block_height') - # start_block = 104_963_982 + start_block = get_block_height("current_block_height") + # start_block = 112959664 logger.info(f"what's the start block, pray tell? {start_block-1}") - loop.run_until_complete(indexer("mainnet", start_block-1, None)) + loop.run_until_complete(indexer(start_block - 1, None)) + except WorkerLostError: + pass # don't log to Sentry finally: loop.close() -from celery.signals import task_revoked +# @worker_shutdown.connect +# def worker_shutdown_handler(sig, how, exitcode, **kwargs): +# if sig == 15: +# logger.info( +# "Celery worker shutdown initiated by signal 15 (SIGTERM)." +# ) # avoid logging to Sentry +# else: +# logger.error("Celery worker shutdown due to signal %d.", sig) + + +jobs_logger = logging.getLogger("jobs") + + +# @shared_task +# def fetch_usd_prices(): +# donations = Donation.objects.filter( +# Q(total_amount_usd__isnull=True) | Q(net_amount_usd__isnull=True) +# ) +# jobs_logger.info(f"Fetching USD prices for {donations.count()} donations...") +# loop = asyncio.get_event_loop() +# tasks = [loop.create_task(donation.fetch_usd_prices()) for donation in donations] +# loop.run_until_complete(asyncio.gather(*tasks)) + + +@shared_task +def fetch_usd_prices(): + donations = Donation.objects.filter( + Q(total_amount_usd__isnull=True) + | Q(net_amount_usd__isnull=True) + | Q(protocol_fee_usd__isnull=True) + | Q(referrer_fee__isnull=False, referrer_fee_usd__isnull=True) + | Q(chef_fee__isnull=False, chef_fee_usd__isnull=True) + ) + donations_count = donations.count() + jobs_logger.info(f"Fetching USD prices for {donations_count} donations...") + for donation in donations: + try: + donation.fetch_usd_prices() + except Exception as e: + jobs_logger.error( + f"Failed to fetch USD prices for donation {donation.id}: {e}" + ) + jobs_logger.info(f"USD prices fetched for {donations_count} donations.") + + +@shared_task +def update_account_statistics(): + + accounts = Account.objects.all() + accounts_count = accounts.count() + jobs_logger.info(f"Updating statistics for {accounts_count} accounts...") + for account in accounts: + try: + # jobs_logger.info(f"Updating statistics for account {account.id}...") + # donors count + account.donors_count = Donation.objects.filter(recipient=account).aggregate( + Count("donor", distinct=True) + )["donor__count"] + + # donations received usd + account.total_donations_in_usd = ( + Donation.objects.filter(recipient=account).aggregate( + Sum("total_amount_usd") + )["total_amount_usd__sum"] + or 0 + ) + + # donations sent usd + 
account.total_donations_out_usd = ( + Donation.objects.filter(donor=account).aggregate( + Sum("total_amount_usd") + )["total_amount_usd__sum"] + or 0 + ) + + # matching pool allocations usd + account.total_matching_pool_allocations_usd = ( + PotPayout.objects.filter( + recipient=account, paid_at__isnull=False + ).aggregate(Sum("amount_paid_usd"))["amount_paid_usd__sum"] + or 0 + ) + + # Save changes + account.save( + update_fields=[ + "donors_count", + "total_donations_in_usd", + "total_donations_out_usd", + "total_matching_pool_allocations_usd", + ] + ) + # jobs_logger.info(f"Account {account.id} statistics updated.") + except Exception as e: + jobs_logger.error( + f"Failed to update statistics for account {account.id}: {e}" + ) + jobs_logger.info(f"Account stats for {accounts.count()} accounts updated.") + @task_revoked.connect def on_task_revoked(request, terminated, signum, expired, **kwargs): - logger.info(f"Task {request.id} revoked; terminated={terminated}, signum={signum}, expired={expired}") + logger.info( + f"Task {request.id} revoked; terminated={terminated}, signum={signum}, expired={expired}" + ) diff --git a/indexer_app/utils.py b/indexer_app/utils.py index 9c3562b..5f394a3 100644 --- a/indexer_app/utils.py +++ b/indexer_app/utils.py @@ -1,17 +1,16 @@ import base64 import decimal import json -from datetime import date, datetime +from datetime import datetime import requests from django.conf import settings from django.core.cache import cache +from django.utils import timezone from near_lake_framework.near_primitives import ExecutionOutcome, Receipt from accounts.models import Account from activities.models import Activity -from base.logging import logger -from base.utils import format_date, format_to_near from donations.models import Donation from indexer_app.models import BlockHeight from lists.models import List, ListRegistration, ListUpvote @@ -22,10 +21,12 @@ PotFactory, PotPayout, PotPayoutChallenge, + PotPayoutChallengeAdminResponse, ) -from tokens.models import Token, TokenHistoricalPrice -GECKO_URL = "https://api.coingecko.com/api/v3" +from .logging import logger + +# GECKO_URL = "https://api.coingecko.com/api/v3" # TODO: move to settings async def handle_new_pot( @@ -36,152 +37,177 @@ async def handle_new_pot( receiptId: str, created_at: datetime, ): - logger.info("new pot deployment process... 
upsert accounts,") - - # Upsert accounts - owner, _ = await Account.objects.aget_or_create(id=data["owner"]) - signer, _ = await Account.objects.aget_or_create(id=signerId) - receiver, _ = await Account.objects.aget_or_create(id=receiverId) - - logger.info("upsert chef") - if data.get("chef"): - chef, _ = await Account.objects.aget_or_create(id=data["chef"]) - - # Create Pot object - logger.info("create pot....") - potObject = await Pot.objects.acreate( - id=receiver, - pot_factory_id=predecessorId, - deployer=signer, - deployed_at=created_at, - source_metadata=data["source_metadata"], - owner_id=data["owner"], - chef_id=data.get("chef"), - name=data["pot_name"], - description=data["pot_description"], - max_approved_applicants=data["max_projects"], - base_currency="near", - application_start=datetime.fromtimestamp(data["application_start_ms"] / 1000), - application_end=datetime.fromtimestamp(data["application_end_ms"] / 1000), - matching_round_start=datetime.fromtimestamp( - data["public_round_start_ms"] / 1000 - ), - matching_round_end=datetime.fromtimestamp(data["public_round_end_ms"] / 1000), - registry_provider=data["registry_provider"], - min_matching_pool_donation_amount=data["min_matching_pool_donation_amount"], - sybil_wrapper_provider=data["sybil_wrapper_provider"], - custom_sybil_checks=data.get("custom_sybil_checks"), - custom_min_threshold_score=data.get("custom_min_threshold_score"), - referral_fee_matching_pool_basis_points=data[ - "referral_fee_matching_pool_basis_points" - ], - referral_fee_public_round_basis_points=data[ - "referral_fee_public_round_basis_points" - ], - chef_fee_basis_points=data["chef_fee_basis_points"], - total_matching_pool="0", - matching_pool_balance="0", - matching_pool_donations_count=0, - total_public_donations="0", - public_donations_count=0, - cooldown_period_ms=None, - all_paid_out=False, - protocol_config_provider=data["protocol_config_provider"], - ) + try: - # Add admins to the Pot - if data.get("admins"): - for admin_id in data["admins"]: - admin, _ = await Account.objects.aget_or_create(id=admin_id) - potObject.admins.aadd(admin) - - # Create activity object - await Activity.objects.acreate( - signer_id=signerId, - receiver_id=receiverId, - timestamp=created_at, - type="Deploy_Pot", - action_result=data, - tx_hash=receiptId, - ) + logger.info("new pot deployment process... 
upsert accounts,") + + # Upsert accounts + owner, _ = await Account.objects.aget_or_create(id=data["owner"]) + signer, _ = await Account.objects.aget_or_create(id=signerId) + receiver, _ = await Account.objects.aget_or_create(id=receiverId) + + logger.info("upsert chef") + if data.get("chef"): + chef, _ = await Account.objects.aget_or_create(id=data["chef"]) + + # Create Pot object + logger.info("create pot....") + pot_defaults = { + "pot_factory_id": predecessorId, + "deployer": signer, + "deployed_at": created_at, + "source_metadata": data["source_metadata"], + "owner_id": data["owner"], + "chef_id": data.get("chef"), + "name": data["pot_name"], + "description": data["pot_description"], + "max_approved_applicants": data["max_projects"], + "base_currency": "near", + "application_start": datetime.fromtimestamp( + data["application_start_ms"] / 1000 + ), + "application_end": datetime.fromtimestamp( + data["application_end_ms"] / 1000 + ), + "matching_round_start": datetime.fromtimestamp( + data["public_round_start_ms"] / 1000 + ), + "matching_round_end": datetime.fromtimestamp( + data["public_round_end_ms"] / 1000 + ), + "registry_provider": data["registry_provider"], + "min_matching_pool_donation_amount": data[ + "min_matching_pool_donation_amount" + ], + "sybil_wrapper_provider": data["sybil_wrapper_provider"], + "custom_sybil_checks": data.get("custom_sybil_checks"), + "custom_min_threshold_score": data.get("custom_min_threshold_score"), + "referral_fee_matching_pool_basis_points": data[ + "referral_fee_matching_pool_basis_points" + ], + "referral_fee_public_round_basis_points": data[ + "referral_fee_public_round_basis_points" + ], + "chef_fee_basis_points": data["chef_fee_basis_points"], + "total_matching_pool": "0", + "matching_pool_balance": "0", + "matching_pool_donations_count": 0, + "total_public_donations": "0", + "public_donations_count": 0, + "cooldown_period_ms": None, + "all_paid_out": False, + "protocol_config_provider": data["protocol_config_provider"], + } + pot, created = await Pot.objects.aupdate_or_create( + id=receiver, defaults=pot_defaults + ) + + # Add admins to the Pot + if data.get("admins"): + for admin_id in data["admins"]: + admin, _ = await Account.objects.aget_or_create(id=admin_id) + pot.admins.aadd(admin) + + defaults = { + "signer_id": signerId, + "receiver_id": receiverId, + "timestamp": created_at, + "tx_hash": receiptId, + } + + activity, activity_created = await Activity.objects.aupdate_or_create( + action_result=data, type="Deploy_Pot", defaults=defaults + ) + except Exception as e: + logger.error(f"Failed to handle new pot, Error: {e}") async def handle_new_pot_factory(data: dict, receiverId: str, created_at: datetime): - logger.info("upserting accounts...") + try: - # Upsert accounts - owner, _ = await Account.objects.aget_or_create( - id=data["owner"], - ) - protocol_fee_recipient_account, _ = await Account.objects.aget_or_create( - id=data["protocol_fee_recipient_account"], - ) + logger.info("upserting accounts...") - receiver, _ = await Account.objects.aget_or_create( - id=receiverId, - ) + # Upsert accounts + owner, _ = await Account.objects.aget_or_create( + id=data["owner"], + ) + protocol_fee_recipient_account, _ = await Account.objects.aget_or_create( + id=data["protocol_fee_recipient_account"], + ) - logger.info("creating factory....") - # Create Factory object - factory = await PotFactory.objects.acreate( - id=receiver, - owner=owner, - deployed_at=created_at, - source_metadata=data["source_metadata"], - 
protocol_fee_basis_points=data["protocol_fee_basis_points"], - protocol_fee_recipient=protocol_fee_recipient_account, - require_whitelist=data["require_whitelist"], - ) + receiver, _ = await Account.objects.aget_or_create( + id=receiverId, + ) - # Add admins to the PotFactory - if data.get("admins"): - for admin_id in data["admins"]: - admin, _ = await Account.objects.aget_or_create( - id=admin_id, - ) - await factory.admins.aadd(admin) + logger.info("creating factory....") + defaults = { + "owner": owner, + "deployed_at": created_at, + "source_metadata": data["source_metadata"], + "protocol_fee_basis_points": data["protocol_fee_basis_points"], + "protocol_fee_recipient": protocol_fee_recipient_account, + "require_whitelist": data["require_whitelist"], + } + # Create Factory object + factory, factory_created = await PotFactory.objects.aupdate_or_create( + id=receiver, defaults=defaults + ) + + # Add admins to the PotFactory + if data.get("admins"): + for admin_id in data["admins"]: + admin, _ = await Account.objects.aget_or_create( + id=admin_id, + ) + await factory.admins.aadd(admin) - # Add whitelisted deployers to the PotFactory - if data.get("whitelisted_deployers"): - for deployer_id in data["whitelisted_deployers"]: - deployer, _ = await Account.objects.aget_or_create(id=deployer_id) - await factory.whitelisted_deployers.aadd(deployer) + # Add whitelisted deployers to the PotFactory + if data.get("whitelisted_deployers"): + for deployer_id in data["whitelisted_deployers"]: + deployer, _ = await Account.objects.aget_or_create(id=deployer_id) + await factory.whitelisted_deployers.aadd(deployer) + except Exception as e: + logger.error(f"Failed to handle new pot Factory, Error: {e}") async def handle_new_list(signerId: str, receiverId: str, status_obj: ExecutionOutcome): # receipt = block.receipts().filter(receiptId=receiptId)[0] - data = json.loads( - base64.b64decode(status_obj.status.get("SuccessValue")).decode("utf-8") - ) + try: - logger.info(f"creating list..... {data}") - - listObject = await List.objects.acreate( - id=data["id"], - owner_id=data["owner"], - default_registration_status=data["default_registration_status"], - name=data["name"], - description=data["description"], - cover_image_url=data["cover_image_url"], - admin_only_registrations=data["admin_only_registrations"], - created_at=datetime.fromtimestamp(data["created_at"] / 1000), - updated_at=datetime.fromtimestamp(data["updated_at"] / 1000), - ) + data = json.loads( + base64.b64decode(status_obj.status.get("SuccessValue")).decode("utf-8") + ) + + logger.info(f"creating list..... 
{data}") + + listObject = await List.objects.acreate( + id=data["id"], + owner_id=data["owner"], + default_registration_status=data["default_registration_status"], + name=data["name"], + description=data["description"], + cover_image_url=data["cover_image_url"], + admin_only_registrations=data["admin_only_registrations"], + created_at=datetime.fromtimestamp(data["created_at"] / 1000), + updated_at=datetime.fromtimestamp(data["updated_at"] / 1000), + ) - logger.info("upserting involveed accts...") + logger.info("upserting involveed accts...") - await Account.objects.aget_or_create(id=data["owner"]) + await Account.objects.aget_or_create(id=data["owner"]) - await Account.objects.aget_or_create(id=signerId) + await Account.objects.aget_or_create(id=signerId) - await Account.objects.aget_or_create(id=receiverId) + await Account.objects.aget_or_create(id=receiverId) - if data.get("admins"): - for admin_id in data["admins"]: - admin_object, _ = await Account.objects.aget_or_create( - id=admin_id, - ) - await listObject.admins.aadd(admin_object) + if data.get("admins"): + for admin_id in data["admins"]: + admin_object, _ = await Account.objects.aget_or_create( + id=admin_id, + ) + await listObject.admins.aadd(admin_object) + except Exception as e: + logger.error(f"Failed to handle new list, Error: {e}") async def handle_new_list_registration( @@ -238,13 +264,15 @@ async def handle_new_list_registration( # Insert activity try: - await Activity.objects.acreate( - signer_id=signerId, - receiver_id=receiverId, - timestamp=insert_data[0]["submitted_at"], - type="Register_Batch", - action_result=reg_data, - tx_hash=receipt.receipt_id, + defaults = { + "signer_id": signerId, + "receiver_id": receiverId, + "timestamp": datetime.fromtimestamp(insert_data[0]["submitted_at"] / 1000), + "tx_hash": receipt.receipt_id, + } + + activity, activity_created = await Activity.objects.aupdate_or_create( + action_result=reg_data, type="Register_Batch", defaults=defaults ) except Exception as e: logger.error(f"Encountered error trying to insert activity: {e}") @@ -281,47 +309,61 @@ async def handle_pot_application( status_obj: ExecutionOutcome, created_at: datetime, ): - # receipt = block.receipts().filter(lambda receipt: receipt.receiptId == receiptId)[0] - result = status_obj.status.get("SuccessValue") - if not result: - return + try: + + # receipt = block.receipts().filter(lambda receipt: receipt.receiptId == receiptId)[0] + result = status_obj.status.get("SuccessValue") + if not result: + return - appl_data = json.loads(base64.b64decode(result).decode("utf-8")) - logger.info(f"new pot application data: {data}, {appl_data}") + appl_data = json.loads(base64.b64decode(result).decode("utf-8")) + logger.info(f"new pot application data: {data}, {appl_data}") - # Update or create the account - project, _ = await Account.objects.aget_or_create( - id=data["project_id"], - ) + # Update or create the account + project, _ = await Account.objects.aget_or_create( + id=data["project_id"], + ) - signer, _ = await Account.objects.aget_or_create( - id=signerId, - ) + signer, _ = await Account.objects.aget_or_create( + id=signerId, + ) - # Create the PotApplication object - logger.info("creating application.......") - application = await PotApplication.objects.acreate( - pot_id=receiverId, - applicant=project, - message=appl_data["message"], - submitted_at=datetime.fromtimestamp(appl_data["submitted_at"] / 1000), - updated_at=created_at, - status=appl_data["status"], - tx_hash=receipt.receipt_id, - ) + # Create the PotApplication 
object + logger.info("creating application.......") + appl_defaults = { + "message": appl_data["message"], + "submitted_at": datetime.fromtimestamp(appl_data["submitted_at"] / 1000), + "updated_at": created_at, + "status": appl_data["status"], + "tx_hash": receipt.receipt_id, + } + application, application_created = ( + await PotApplication.objects.aupdate_or_create( + applicant=project, + pot_id=receiverId, + defaults=appl_defaults, + ) + ) - # Create the activity object - logger.info("creating activity for action....") - await Activity.objects.acreate( - signer=signer, - receiver_id=receiverId, - timestamp=application.submitted_at, - type="Submit_Application", - action_result=appl_data, - tx_hash=receipt.receipt_id, - ) + # Create the activity object + logger.info("creating activity for action....") + + defaults = { + "signer_id": signerId, + "receiver_id": receiverId, + "timestamp": created_at, + "tx_hash": receipt.receipt_id, + } + + activity, activity_created = await Activity.objects.aupdate_or_create( + action_result=appl_data, type="Submit_Application", defaults=defaults + ) - logger.info("PotApplication and Activity created successfully.") + logger.info( + f"PotApplication and Activity created successfully, {activity_created}" + ) + except Exception as e: + logger.error(f"Failed to handle pot application, Error: {e}") async def handle_pot_application_status_change( @@ -331,325 +373,253 @@ async def handle_pot_application_status_change( receipt: Receipt, status_obj: ExecutionOutcome, ): - logger.info(f"pot application update data: {data}, {receiverId}") + try: - # receipt = next(receipt for receipt in block.receipts() if receipt.receiptId == receiptId) - update_data = json.loads( - base64.b64decode(status_obj.status["SuccessValue"]).decode("utf-8") - ) + logger.info(f"pot application update data: {data}, {receiverId}") - # Retrieve the PotApplication object - appl = await PotApplication.objects.filter( - applicant_id=data["project_id"] - ).afirst() # TODO: handle this being None - - # Create the PotApplicationReview object - logger.info(f"create review...... {appl}") - updated_at = datetime.fromtimestamp(update_data.get("updated_at") / 1000) - await PotApplicationReview.objects.acreate( - application_id=appl.id, - reviewer_id=signerId, - notes=update_data.get("review_notes"), - status=update_data["status"], - reviewed_at=updated_at, - tx_hash=receipt.receipt_id, - ) + # receipt = next(receipt for receipt in block.receipts() if receipt.receiptId == receiptId) + update_data = json.loads( + base64.b64decode(status_obj.status["SuccessValue"]).decode("utf-8") + ) - # Update the PotApplication object - await PotApplication.objects.filter(applicant_id=data["project_id"]).aupdate( - **{"status": update_data["status"], "updated_at": updated_at} - ) + # Retrieve the PotApplication object + appl = await PotApplication.objects.filter( + applicant_id=data["project_id"] + ).afirst() + + if not appl: + logger.error( + f"PotApplication object not found for project_id: {data['project_id']}" + ) + return + + # Create the PotApplicationReview object + logger.info(f"create review...... 
{appl}") + updated_at = datetime.fromtimestamp(update_data.get("updated_at") / 1000) + + defaults = { + "notes": update_data.get("review_notes"), + "status": update_data["status"], + "tx_hash": receipt.receipt_id, + } + + await PotApplicationReview.objects.aupdate_or_create( + application_id=appl.id, + reviewer_id=signerId, + reviewed_at=updated_at, + defaults=defaults, + ) + + # Update the PotApplication object + await PotApplication.objects.filter(applicant_id=data["project_id"]).aupdate( + **{"status": update_data["status"], "updated_at": updated_at} + ) - logger.info("PotApplicationReview and PotApplication updated successfully.") + logger.info("PotApplicationReview and PotApplication updated successfully.") + except Exception as e: + logger.error(f"Failed to change pot application status, Error: {e}") async def handle_default_list_status_change( data: dict, receiverId: str, status_obj: ExecutionOutcome ): - logger.info(f"update project data: {data}, {receiverId}") + try: - result_data = json.loads( - base64.b64decode(status_obj.status.get("SuccessValue")).decode("utf-8") - ) + logger.info(f"update project data: {data}, {receiverId}") - list_id = data.get("registration_id") - list_update = { - "name": result_data["name"], - "owner_id": result_data["owner"], - "default_registration_status": result_data["default_registration_status"], - "admin_only_registrations": result_data["admin_only_registrations"], - "updated_at": result_data["updated_at"], - } - if result_data.get("description"): - list_update["description"] = result_data["description"] - if result_data.get("cover_image_url"): - list_update["cover_image_url"] = result_data["cover_image_url"] + result_data = json.loads( + base64.b64decode(status_obj.status.get("SuccessValue")).decode("utf-8") + ) + + list_id = data.get("registration_id") + list_update = { + "name": result_data["name"], + "owner_id": result_data["owner"], + "default_registration_status": result_data["default_registration_status"], + "admin_only_registrations": result_data["admin_only_registrations"], + "updated_at": result_data["updated_at"], + } + if result_data.get("description"): + list_update["description"] = result_data["description"] + if result_data.get("cover_image_url"): + list_update["cover_image_url"] = result_data["cover_image_url"] - await List.objects.filter(id=list_id).aupdate(**list_update) + await List.objects.filter(id=list_id).aupdate(**list_update) - logger.info("List updated successfully.") + logger.info("List updated successfully.") + except Exception as e: + logger.error(f"Failed to change list status, Error: {e}") async def handle_list_upvote( data: dict, receiverId: str, signerId: str, receiptId: str ): - logger.info(f"upvote list: {data}, {receiverId}") + try: - acct, _ = await Account.objects.aget_or_create( - id=signerId, - ) + logger.info(f"upvote list: {data}, {receiverId}") - created_at = datetime.now() + acct, _ = await Account.objects.aget_or_create( + id=signerId, + ) - await ListUpvote.objects.acreate( - list_id=data.get("list_id") or receiverId, - account_id=signerId, - created_at=created_at, - ) + created_at = datetime.now() - await Activity.objects.acreate( - signer_id=signerId, - receiver_id=receiverId, - timestamp=created_at, - type="Upvote", - action_result=data, - tx_hash=receiptId, - ) + await ListUpvote.objects.aupdate_or_create( + list_id=data.get("list_id") or receiverId, + account_id=signerId, + ) + + defaults = { + "signer_id": signerId, + "receiver_id": receiverId, + "timestamp": created_at, + "tx_hash": receiptId, + 
}
+
+        activity, activity_created = await Activity.objects.aupdate_or_create(
+            action_result=data, type="Upvote", defaults=defaults
+        )

-    logger.info("Upvote and activity records created successfully.")
+        logger.info(
+            f"Upvote and activity records created successfully. {activity_created}"
+        )
+    except Exception as e:
+        logger.error(f"Failed to upvote list, Error: {e}")


 async def handle_set_payouts(data: dict, receiverId: str, receipt: Receipt):
-    logger.info(f"set payout data: {data}, {receiverId}")
-    payouts = data.get("payouts", [])
-
-    insertion_data = []
-    for payout in payouts:
-        # General question: should we register projects as accounts?
-        potPayout = {
-            "recipient_id": payout.get("project_id"),
-            "amount": payout.get("amount"),
-            "ft_id": payout.get("ft_id", "near"),
-            "tx_hash": receipt.receipt_id,
-        }
-        insertion_data.append(potPayout)
+    try:
+
+        logger.info(f"set payout data: {data}, {receiverId}")
+        payouts = data.get("payouts", [])
+
+        insertion_data = []
+        for payout in payouts:
+            # General question: should we register projects as accounts?
+            potPayout = {
+                "recipient_id": payout.get("project_id"),
+                "amount": payout.get("amount"),
+                "ft_id": payout.get("ft_id", "near"),
+                "tx_hash": receipt.receipt_id,
+            }
+            # abulk_create expects model instances, not plain dicts
+            insertion_data.append(PotPayout(**potPayout))

-    await PotPayout.objects.abulk_create(insertion_data)
+        await PotPayout.objects.abulk_create(insertion_data, ignore_conflicts=True)
+    except Exception as e:
+        logger.error(f"Failed to set payouts, Error: {e}")


 async def handle_transfer_payout(
     data: dict, receiverId: str, receiptId: str, created_at: datetime
 ):
-    data = data["payout"]
-    logger.info(f"fulfill payout data: {data}, {receiverId}")
-    payout = {
-        "recipient_id": data["project_id"],
-        "amount": data["amount"],
-        "paid_at": data.get("paid_at", created_at),
-        "tx_hash": receiptId,
-    }
-    await PotPayout.objects.filter(recipient_id=data["project_id"]).aupdate(**payout)
+    try:
+
+        data = data["payout"]
+        logger.info(f"fulfill payout data: {data}, {receiverId}")
+        payout = {
+            "recipient_id": data["project_id"],
+            "amount": data["amount"],
+            "paid_at": data.get("paid_at", created_at),
+            "tx_hash": receiptId,
+        }
+        await PotPayout.objects.filter(recipient_id=data["project_id"]).aupdate(
+            **payout
+        )
+    except Exception as e:
+        logger.error(f"Failed to update payout data, Error: {e}")


 async def handle_payout_challenge(
-    data: dict, receiverId: str, signerId: str, receiptId: str
+    data: dict, receiverId: str, signerId: str, receiptId: str, created_at: datetime
 ):
-    logger.info(f"challenging payout..: {data}, {receiverId}")
-    created_at = datetime.now()
-    payoutChallenge = {
-        "challenger_id": signerId,
-        "pot_id": receiverId,
-        "created_at": created_at,
-        "message": data["reason"],
-        "tx_hash": receiptId,
-    }
-    await PotPayoutChallenge.objects.acreate(**payoutChallenge)
-
-    await Activity.objects.acreate(
-        signer_id=signerId,
-        receiver_id=receiverId,
-        timestamp=created_at,
-        type="Challenge_Payout",
-        action_result=payoutChallenge,
-        tx_hash=receiptId,
-    )
+    try:
+        logger.info(f"challenging payout..: {data}, {receiverId}")
+        payoutChallenge = {
+            "created_at": created_at,
+            "message": data["reason"],
+            "tx_hash": receiptId,
+        }
+        await PotPayoutChallenge.objects.aupdate_or_create(
+            challenger_id=signerId, pot_id=receiverId, defaults=payoutChallenge
+        )
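+        # NB: PotPayoutChallenge is unique per (challenger, pot) — see the
+        # unique_together added in the pots migration below — so re-indexing
+        # the same challenge updates the existing row instead of duplicating it.

-async def handle_list_admin_removal(data, receiverId, signerId, receiptId):
-    logger.info(f"removing admin...: {data}, {receiverId}")
-    list_obj = await List.objects.aget(id=data["list_id"])
-
-    for acct in data["admins"]:
-        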
list_obj.admins.remove({"admins_id": acct}) # ?? - - activity = { - "signer_id": signerId, - "receiver_id": receiverId, - "timestamp": datetime.now(), - "type": "Remove_List_Admin", - "tx_hash": receiptId, - } + defaults = { + "signer_id": signerId, + "receiver_id": receiverId, + "timestamp": created_at, + "tx_hash": receiptId, + } - await Activity.objects.acreate(**activity) + activity, activity_created = await Activity.objects.aupdate_or_create( + action_result=payoutChallenge, type="Challenge_Payout", defaults=defaults + ) + except Exception as e: + logger.error(f"Failed to create payoutchallenge, Error: {e}") -# TODO: Need to abstract some actions. -async def handle_batch_donations( - receiverId: str, - signerId: str, - actionName: str, - receipt_obj: Receipt, - log_data: list, +async def handle_payout_challenge_response( + data: dict, receiverId: str, signerId: str, receiptId: str, created_at: datetime ): - logger.info("BAtch Transaction for donation...") - for event_data in log_data: - donation_data = event_data["donation"] - net_amount = int(donation_data["total_amount"]) - int( - donation_data["protocol_fee"] - ) - logger.info(f"Donation data: {donation_data}, {net_amount}") - # insert donate contract which is the receiver id(because of activitry relationship mainly) - donate_contract, _ = await Account.objects.aget_or_create(id=receiverId) - donated_at = datetime.fromtimestamp( - (donation_data.get("donated_at") or donation_data.get("donated_at_ms")) - / 1000 + try: + logger.info(f"responding to payout challenge..: {data}, {receiverId}") + response_defaults = { + "admin": signerId, + "message": data.get("notes"), + "resolved": data.get("resolve_challenge"), + "tx_hash": receiptId, + } + await PotPayoutChallengeAdminResponse.objects.aupdate_or_create( + challenger_id=data["challenger_id"], + pot_id=receiverId, + created_at=created_at, + defaults=response_defaults, ) + except Exception as e: + logger.error(f"Failed to handle admin challeneg response, Error: {e}") - # Upsert donor account - donor, _ = await Account.objects.aget_or_create(id=donation_data["donor_id"]) - - recipient = None - if donation_data.get("recipient_id"): - recipient, _ = await Account.objects.aget_or_create( - id=donation_data["recipient_id"] - ) - else: - if not donation_data.get("matching_pool"): - recipient, _ = await Account.objects.aget_or_create( - id=donation_data["project_id"] - ) - if donation_data.get("referrer_id"): - referrer, _ = await Account.objects.aget_or_create( - id=donation_data["referrer_id"] - ) +async def handle_list_admin_removal(data, receiverId, signerId, receiptId): + try: - # Upsert token account - token_acct, _ = await Account.objects.aget_or_create( - id=(donation_data.get("ft_id") or "near") - ) + logger.info(f"removing admin...: {data}, {receiverId}") + list_obj = await List.objects.aget(id=data["list_id"]) - # Upsert token - try: - token = await Token.objects.aget(id=token_acct) - except Token.DoesNotExist: - # TODO: fetch metadata from token contract (ft_metadata) and add decimals to token record. 
For now adding 12 which is most common
-            token = await Token.objects.acreate(id=token_acct, decimals=12)
+        for acct in data["admins"]:
+            # remove() expects Account instances (or pks), not a dict, and the
+            # async variant must be awaited to actually unlink the row
+            admin, _ = await Account.objects.aget_or_create(id=acct)
+            await list_obj.admins.aremove(admin)

-        # Fetch historical token data
-        # late_p = await token.get_most_recent_price()
-        try:
-            logger.info("fetching historical price...")
-            endpoint = f"{GECKO_URL}/coins/{donation_data.get('ft_id', 'near')}/history?date={format_date(donated_at)}&localization=false"
-            response = requests.get(endpoint)
-            price_data = response.json()
-            unit_price = (
-                price_data.get("market_data", {}).get("current_price", {}).get("usd")
-            )
-            logger.info(f"the usd price is what, {unit_price}")
-            await TokenHistoricalPrice.objects.acreate(
-                token=token,
-                price_usd=unit_price,
-            )
-        except Exception as e:
-            logger.warning(f"api rate limit? {e}")
-            # TODO: NB: below method has not been tested
-            # historical_price = await token.get_most_recent_price() # to use model methods, we might have to use asgiref sync_to_async
-            historical = await TokenHistoricalPrice.objects.aget(
-                token=token,
-                price_usd=unit_price,
-            )
-            # print("fetched old price:", historical_price.price_usd)
-            unit_price = historical.price_usd
+        activity = {
+            "signer_id": signerId,
+            "receiver_id": receiverId,
+            "timestamp": datetime.now(),
+            "tx_hash": receiptId,
+        }

-        total_amount = donation_data["total_amount"]
-        net_amount = net_amount - int(donation_data.get("referrer_fee") or 0)
-
-        # Calculate USD amounts
-        totalnearAmount = format_to_near(total_amount)
-        netnearAmount = format_to_near(net_amount)
-        total_amount_usd = unit_price * totalnearAmount
-        net_amount_usd = unit_price * netnearAmount
-
-        logger.info(f"inserting donations... {total_amount_usd}")
-        donation = await Donation.objects.acreate(
-            on_chain_id=donation_data["id"],
-            donor=donor,
-            total_amount=total_amount,
-            total_amount_usd=total_amount_usd,
-            net_amount_usd=net_amount_usd,
-            net_amount=net_amount,
-            ft=token_acct,
-            message=donation_data.get("message"),
-            donated_at=donated_at,
-            matching_pool=donation_data.get("matching_pool", False),
-            recipient=recipient,
-            protocol_fee=donation_data["protocol_fee"],
-            referrer=referrer if donation_data.get("referrer_id") else None,
-            referrer_fee=donation_data.get("referrer_fee"),
-            tx_hash=receipt_obj.receipt_id,
+        # keyed on action_result as well, for parity with the other handlers,
+        # so each removal event keeps its own Activity row
+        activity, activity_created = await Activity.objects.aupdate_or_create(
+            action_result=data, type="Remove_List_Admin", defaults=activity
         )
+    except Exception as e:
+        logger.error(f"Failed to remove list admin, Error: {e}")
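+
+# NB (sketch): related managers also accept raw primary keys, so the loop above
+# could skip the aget_or_create round-trip:
+#
+#     for acct in data["admins"]:
+#         await list_obj.admins.aremove(acct)  # unlink only; the Account row survives

-        if actionName != "direct":
-            logger.info("selecting pot to make public donation update")
-            pot = await Pot.objects.aget(id=receiverId)
-            await Donation.objects.filter(id=donation.id).aupdate(**{"pot": pot})
-            potUpdate = {
-                "total_public_donations": int(pot.total_public_donations or 0)
-                + int(total_amount),
-            }
-            if donation_data.get("matching_pool"):
-                potUpdate["total_matching_pool"] = (
-                    pot.total_matching_pool or 0
-                ) + total_amount
-                potUpdate["matching_pool_donations_count"] = (
-                    pot.matching_pool_donations_count or 0
-                ) + 1
-                # accountUpdate = {}
-            else:
-                potUpdate["public_donations_count"] = (
-                    pot.public_donations_count or 0
-                ) + 1
-            await Pot.objects.filter(id=receiverId).aupdate(**potUpdate)
-
-            # donation_recipient = donation_data.get('project_id', donation_data['recipient_id'])
-        logger.info(
-            f"update totl donated for {donor.id}, {donor.total_donations_out_usd + decimal.Decimal(total_amount_usd)}"
-        )
-        await Account.objects.filter(id=donor.id).aupdate(
-            **{
-                "total_donations_out_usd":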
donor.total_donations_out_usd
-                + decimal.Decimal(total_amount_usd)
-            }
-        )
-        if recipient:
-            acct = await Account.objects.aget(id=recipient.id)
-            logger.info(f"selected {acct} to perform donor count update")
-            acctUpdate = {
-                "donors_count": acct.donors_count + 1,
-                "total_donations_in_usd": acct.total_donations_in_usd
-                + decimal.Decimal(net_amount_usd),
-            }
-            await Account.objects.filter(id=recipient.id).aupdate(**acctUpdate)
-
-        # Insert activity record
-        await Activity.objects.acreate(
-            signer_id=signerId,
-            receiver_id=receiverId,
-            timestamp=donation.donated_at,
-            type="Donate_Direct",
-            action_result=donation_data,
-            tx_hash=receipt_obj.receipt_id,
+# TODO: Need to abstract some actions.
+async def handle_batch_donations(
+    receiverId: str,
+    signerId: str,
+    actionName: str,
+    receipt_obj: Receipt,
+    log_data: list,
+):
+    logger.info("Batch transaction for donation...")
+    for event_data in log_data:
+        await handle_new_donations(
+            event_data["donation"],
+            receiverId,
+            signerId,
+            actionName,
+            receipt_obj,
+            status_obj=None,
+            log_data=[event_data],
         )


@@ -661,29 +631,19 @@ async def handle_new_donations(
     receipt_obj: Receipt,
     status_obj: ExecutionOutcome,
     log_data: list,
-    created_at: datetime,
 ):
     logger.info(f"new donation data: {data}, {receiverId}")
+    donate_contract_addr = "donate." + settings.POTLOCK_TLA

     if (
         actionName == "direct"
-    ) and receiverId == "donate.potlock.near":  # early pot donations followed similarly to direct donations i.e they returned result instead of events.
+    ) and receiverId == donate_contract_addr:  # early pot donations behaved like direct donations, i.e. they returned a result instead of events.
         logger.info("calling donate contract...")
         # Handle direct donation
         if not log_data:
             return

-        if len(log_data) > 1:
-            # log_data = [
-            #     x
-            #     for x in log_data
-            #     if x["donation"]["recipient_id"] == data["recipient_id"]
-            #     ]
-            return await handle_batch_donations(
-                receiverId, signerId, actionName, receipt_obj, log_data
-            )
-
         logger.info(f"event after possible filtering: {log_data}")

         event_data = log_data[0]
@@ -707,166 +667,126 @@ async def handle_new_donations(
         (donation_data.get("donated_at") or donation_data.get("donated_at_ms"))
         / 1000
     )
-    # Upsert donor account
-    donor, _ = await Account.objects.aget_or_create(id=donation_data["donor_id"])
-    recipient = None
-
-    if donation_data.get("recipient_id"):
-        recipient, _ = await Account.objects.aget_or_create(
-            id=donation_data["recipient_id"]
-        )
-    if donation_data.get("project_id"):
-        recipient, _ = await Account.objects.aget_or_create(
-            id=donation_data["project_id"]
-        )
+    try:

-    if donation_data.get("referrer_id"):
-        referrer, _ = await Account.objects.aget_or_create(
-            id=donation_data["referrer_id"]
-        )
+        # Upsert donor account
+        donor, _ = await Account.objects.aget_or_create(id=donation_data["donor_id"])
+        recipient = None

-    # Upsert token account
-    token_acct, _ = await Account.objects.aget_or_create(
-        id=(donation_data.get("ft_id") or "near")
-    )
+        if donation_data.get("recipient_id"):
+            recipient, _ = await Account.objects.aget_or_create(
+                id=donation_data["recipient_id"]
+            )
+        if donation_data.get("project_id"):
+            recipient, _ = await Account.objects.aget_or_create(
+                id=donation_data["project_id"]
+            )

-    # Upsert token
-    try:
-        token = await Token.objects.aget(id=token_acct)
-    except Token.DoesNotExist:
-        # TODO: fetch metadata from token contract (ft_metadata) and add decimals to token record.
For now adding 12 which is most common - token = await Token.objects.acreate(id=token_acct, decimals=12) + if donation_data.get("referrer_id"): + referrer, _ = await Account.objects.aget_or_create( + id=donation_data["referrer_id"] + ) - # Fetch historical token data - # late_p = await token.get_most_recent_price() - try: - logger.info("fetching historical price...") - endpoint = f"{GECKO_URL}/coins/{donation_data.get('ft_id', 'near')}/history?date={format_date(donated_at)}&localization=false" - response = requests.get(endpoint) - price_data = response.json() - unit_price = ( - price_data.get("market_data", {}).get("current_price", {}).get("usd") - ) - await TokenHistoricalPrice.objects.acreate( # need to change token model to use token as id - token=token, - price_usd=unit_price, + # Upsert token account + token_acct, _ = await Account.objects.aget_or_create( + id=(donation_data.get("ft_id") or "near") ) except Exception as e: - logger.warning(f"api rate limit? {e}") - # TODO: NB: below method has not been tested - # historical_price = await token.get_most_recent_price() # to use model methods, we might have to use asgiref sync_to_async - historical = await TokenHistoricalPrice.objects.aget(token=token) - # print("fetched old price:", historical_price.price_usd) - unit_price = historical.price_usd - - total_amount = donation_data["total_amount"] - net_amount = net_amount - int(donation_data.get("referrer_fee") or 0) - - # Calculate USD amounts - totalnearAmount = format_to_near(total_amount) - netnearAmount = format_to_near(net_amount) - total_amount_usd = unit_price * totalnearAmount - net_amount_usd = unit_price * netnearAmount - - logger.info(f"inserting donations... {total_amount_usd}") - if actionName == "direct": # - donation = await Donation.objects.acreate( - on_chain_id=donation_data["id"], - donor=donor, - total_amount=total_amount, - total_amount_usd=total_amount_usd, - net_amount_usd=net_amount_usd, - net_amount=net_amount, - ft=token_acct, - message=donation_data.get("message"), - donated_at=donated_at, - matching_pool=donation_data.get("matching_pool", False), - recipient=recipient, - protocol_fee=donation_data["protocol_fee"], - referrer=referrer if donation_data.get("referrer_id") else None, - referrer_fee=donation_data.get("referrer_fee"), - tx_hash=receipt_obj.receipt_id, - ) - - if actionName != "direct": - logger.info("selecting pot to make public donation update") - pot = await Pot.objects.aget(id=receiverId) - donation = await Donation.objects.acreate( - on_chain_id=donation_data["id"], - donor=donor, - pot=pot, - total_amount=total_amount, - total_amount_usd=total_amount_usd, - net_amount_usd=net_amount_usd, - net_amount=net_amount, - ft=token_acct, - message=donation_data.get("message"), - donated_at=donated_at, - matching_pool=donation_data.get("matching_pool", False), - recipient=recipient, - protocol_fee=donation_data["protocol_fee"], - referrer=referrer if donation_data.get("referrer_id") else None, - referrer_fee=donation_data.get("referrer_fee"), - tx_hash=receipt_obj.receipt_id, - ) - potUpdate = { - "total_public_donations": str( - int(pot.total_public_donations or 0) + int(total_amount) - ), - "total_public_donations_usd": int(pot.total_public_donations_usd or 0.0) - + total_amount_usd, - } - if donation_data.get("matching_pool"): - potUpdate["total_matching_pool"] = str( - int(pot.total_matching_pool or 0) + int(total_amount) - ) - potUpdate["total_matching_pool"] = ( - pot.total_matching_pool_usd or 0.0 - ) + total_amount_usd - 
potUpdate["matching_pool_donations_count"] = ( - pot.matching_pool_donations_count or 0 - ) + 1 - - if recipient: - await Account.objects.filter(id=recipient.id).aupdate( - **{ - "total_matching_pool_allocations_usd": recipient.total_matching_pool_allocations_usd - + total_amount_usd - } - ) + logger.error(f"Failed to create/get an account involved in donation: {e}") + + # # Upsert token + # try: + # token = await Token.objects.aget(id=token_acct) + # except Token.DoesNotExist: + # # TODO: fetch metadata from token contract (ft_metadata) and add decimals to token record. For now adding 12 which is most common + # token = await Token.objects.acreate(id=token_acct, decimals=12) + + # # Fetch historical token data + # # late_p = await token.get_most_recent_price() + # try: + # logger.info("fetching historical price...") + # logger.info(f"donated at: {donated_at}") + # endpoint = f"{GECKO_URL}/coins/{donation_data.get('ft_id', 'near')}/history?date={format_date(donated_at)}&localization=false" + # logger.info(f"endpoint: {endpoint}") + # response = requests.get(endpoint) + # logger.info(f"response: {response}") + # if response.status_code == 429: + # logger.error("Coingecko rate limit exceeded") + # price_data = response.json() + # except Exception as e: + # logger.error(f"Failed to fetch price data: {e}") + # logger.info(f"price data: {price_data}") + # unit_price = price_data.get("market_data", {}).get("current_price", {}).get("usd") + # logger.info(f"unit price: {unit_price}") + # if unit_price: + # try: + # await TokenHistoricalPrice.objects.acreate( # need to change token model to use token as id + # token=token, + # price_usd=unit_price, + # timestamp=donated_at, + # ) + # except Exception as e: + # logger.error( + # f"Error creating TokenHistoricalPrice: {e} token: {token} unit_price: {unit_price}" + # ) + # # historical_price = await token.get_most_recent_price() # to use model methods, we might have to use asgiref sync_to_async + # historical = await TokenHistoricalPrice.objects.aget(token=token) + # unit_price = historical.price_usd + + # total_amount = donation_data["total_amount"] + # net_amount = net_amount - int(donation_data.get("referrer_fee") or 0) + + # # Calculate and format amounts + # total_near_amount = format_to_near(total_amount) + # net_near_amount = format_to_near(net_amount) + # total_amount_usd = None if not unit_price else unit_price * total_near_amount + # net_amount_usd = None if not unit_price else unit_price * net_near_amount - # accountUpdate = {} - else: - potUpdate["public_donations_count"] = (pot.public_donations_count or 0) + 1 + try: - await Pot.objects.filter(id=receiverId).aupdate(**potUpdate) + total_amount = donation_data["total_amount"] - # donation_recipient = donation_data.get('project_id', donation_data['recipient_id']) - logger.info( - f"update totl donated for {donor.id}, {donor.total_donations_out_usd + decimal.Decimal(total_amount_usd)}" - ) - await Account.objects.filter(id=donor.id).aupdate( - **{ - "total_donations_out_usd": donor.total_donations_out_usd - + decimal.Decimal(total_amount_usd) - } - ) - if recipient: - acct = await Account.objects.aget(id=recipient.id) - logger.info(f"selected {acct} to perform donor count update") - acctUpdate = { - "donors_count": acct.donors_count + 1, - "total_donations_in_usd": acct.total_donations_in_usd - + decimal.Decimal(net_amount_usd), + logger.info(f"inserting donations... 
by {actionName}") + default_data = { + "donor": donor, + "total_amount": total_amount, + "total_amount_usd": None, # USD amounts will be added later (could be in pre-save hook) + "net_amount_usd": None, + "net_amount": net_amount, + "ft": token_acct, + "message": donation_data.get("message"), + "donated_at": donated_at, + "matching_pool": donation_data.get("matching_pool", False), + "recipient": recipient, + "protocol_fee": donation_data["protocol_fee"], + "referrer": referrer if donation_data.get("referrer_id") else None, + "referrer_fee": donation_data.get("referrer_fee"), + "tx_hash": receipt_obj.receipt_id, } - await Account.objects.filter(id=recipient.id).aupdate(**acctUpdate) - - # Insert activity record - await Activity.objects.acreate( - signer_id=signerId, - receiver_id=receiverId, - timestamp=donation.donated_at, - type=( + logger.info(f"default donation data: {default_data}") + + if actionName != "direct": + logger.info("selecting pot to make public donation update") + pot = await Pot.objects.aget(id=receiverId) + default_data["pot"] = pot + + donation, donation_created = await Donation.objects.aupdate_or_create( + on_chain_id=donation_data["id"], defaults={}, create_defaults=default_data + ) + logger.info(f"Created donation? {donation_created}") + + # fetch USD prices + await donation.fetch_usd_prices_async() # might not need to await this? + + # # convert total_amount_usd and net_amount_usd from None + # if total_amount_usd is None: + # total_amount_usd = 0.0 + # if net_amount_usd is None: + # net_amount_usd = 0.0 + + logger.info(f"Created donation? {donation_created}") + # Insert or update activity record + activity_type = ( "Donate_Direct" if actionName == "direct" else ( @@ -874,19 +794,102 @@ async def handle_new_donations( if donation.matching_pool else "Donate_Pot_Public" ) - ), - action_result=donation_data, - tx_hash=receipt_obj.receipt_id, - ) - + ) + defaults = { + "signer_id": signerId, + "receiver_id": receiverId, + "timestamp": donation.donated_at, + "tx_hash": receipt_obj.receipt_id, + } + try: -async def cache_block_height(key: str, height: int, block_count: int) -> int: + activity, activity_created = await Activity.objects.aupdate_or_create( + action_result=donation_data, type=activity_type, defaults=defaults + ) + if activity_created: + logger.info(f"Activity created: {activity}") + else: + logger.info(f"Activity updated: {activity}") + except Exception as e: + logger.info(f"Failed to create Activity: {e}") + except Exception as e: + logger.error(f"Failed to create/update donation: {e}") + + ### COMMENTING OUT FOR NOW SINCE WE HAVE PERIODIC JOB RUNNING TO UPDATE ACCOUNT STATS (NB: DOESN'T CURRENTLY COVER POT STATS) + ### CAN ALWAYS ADD BACK IF DESIRED + # if donation_created: # only do stats updates if donation object was created + + # if actionName != "direct": + + # potUpdate = { + # "total_public_donations": str( + # int(pot.total_public_donations or 0) + int(total_amount) + # ), + # "total_public_donations_usd": int(pot.total_public_donations_usd or 0.0) + # + total_amount_usd, + # } + # if donation_data.get("matching_pool"): + # potUpdate["total_matching_pool"] = str( + # int(pot.total_matching_pool or 0) + int(total_amount) + # ) + # potUpdate["total_matching_pool"] = ( + # pot.total_matching_pool_usd or 0.0 + # ) + total_amount_usd + # potUpdate["matching_pool_donations_count"] = ( + # pot.matching_pool_donations_count or 0 + # ) + 1 + + # if recipient: + # await Account.objects.filter(id=recipient.id).aupdate( + # **{ + # 
"total_matching_pool_allocations_usd": recipient.total_matching_pool_allocations_usd + # + total_amount_usd + # } + # ) + + # # accountUpdate = {} + # else: + # potUpdate["public_donations_count"] = ( + # pot.public_donations_count or 0 + # ) + 1 + + # await Pot.objects.filter(id=receiverId).aupdate(**potUpdate) + + # # donation_recipient = donation_data.get('project_id', donation_data['recipient_id']) + # logger.info( + # f"update total donated for {donor.id}, {donor.total_donations_out_usd + decimal.Decimal(total_amount_usd)}" + # ) + # await Account.objects.filter(id=donor.id).aupdate( + # **{ + # "total_donations_out_usd": donor.total_donations_out_usd + # + decimal.Decimal(total_amount_usd) + # } + # ) + # if recipient: + # acct = await Account.objects.aget(id=recipient.id) + # logger.info(f"selected {acct} to perform donor count update") + # acctUpdate = { + # "donors_count": acct.donors_count + 1, + # "total_donations_in_usd": acct.total_donations_in_usd + # + decimal.Decimal(net_amount_usd), + # } + # await Account.objects.filter(id=recipient.id).aupdate(**acctUpdate) + + +async def cache_block_height( + key: str, height: int, block_count: int, block_timestamp: int +) -> int: await cache.aset(key, height) # the cache os the default go to for the restart block, the db is a backup if the redis server crashes. if (block_count % int(settings.BLOCK_SAVE_HEIGHT or 400)) == 0: logger.info(f"saving daylight, {height}") await BlockHeight.objects.aupdate_or_create( - id=1, defaults={"block_height": height, "updated_at": datetime.now()} + id=1, + defaults={ + "block_height": height, + "block_timestamp": datetime.fromtimestamp(block_timestamp / 1000000000), + "updated_at": timezone.now(), + }, ) # better than ovverriding model's save method to get a singleton? 
we need only one entry return height diff --git a/lists/admin.py b/lists/admin.py index ae08c51..db244ed 100644 --- a/lists/admin.py +++ b/lists/admin.py @@ -1,22 +1,68 @@ from django.contrib import admin -from .models import List, ListUpvote, ListRegistration, Account + +from .models import Account, List, ListRegistration, ListUpvote + @admin.register(List) class ListAdmin(admin.ModelAdmin): - list_display = ('id', 'name', 'owner', 'default_registration_status', 'created_at', 'updated_at') - list_filter = ('created_at', 'updated_at', 'default_registration_status') - search_fields = ('name', 'owner__id') - ordering = ('-created_at',) + list_display = ( + "id", + "name", + "owner", + "default_registration_status", + "created_at", + "updated_at", + ) + list_filter = ("created_at", "updated_at", "default_registration_status") + search_fields = ("name", "owner__id") + ordering = ("-created_at",) + + def has_add_permission(self, request): + return False + + def has_change_permission(self, request, obj=None): + return False + + def has_delete_permission(self, request, obj=None): + return False + @admin.register(ListUpvote) class ListUpvoteAdmin(admin.ModelAdmin): - list_display = ('id', 'list', 'account', 'created_at') - list_filter = ('created_at',) - search_fields = ('list__name', 'account__id') + list_display = ("id", "list", "account", "created_at") + list_filter = ("created_at",) + search_fields = ("list__name", "account__id") + + def has_add_permission(self, request): + return False + + def has_change_permission(self, request, obj=None): + return False + + def has_delete_permission(self, request, obj=None): + return False + @admin.register(ListRegistration) class ListRegistrationAdmin(admin.ModelAdmin): - list_display = ('id', 'list', 'registrant', 'registered_by', 'status', 'submitted_at', 'updated_at') - list_filter = ('status', 'submitted_at', 'updated_at') - search_fields = ('list__name', 'registrant__id', 'registered_by__id') - ordering = ('-submitted_at',) + list_display = ( + "id", + "list", + "registrant", + "registered_by", + "status", + "submitted_at", + "updated_at", + ) + list_filter = ("status", "submitted_at", "updated_at") + search_fields = ("list__name", "registrant__id", "registered_by__id") + ordering = ("-submitted_at",) + + def has_add_permission(self, request): + return False + + def has_change_permission(self, request, obj=None): + return False + + def has_delete_permission(self, request, obj=None): + return False diff --git a/lists/migrations/0002_alter_listupvote_options_and_more.py b/lists/migrations/0002_alter_listupvote_options_and_more.py new file mode 100644 index 0000000..af8f5c2 --- /dev/null +++ b/lists/migrations/0002_alter_listupvote_options_and_more.py @@ -0,0 +1,22 @@ +# Generated by Django 5.0.4 on 2024-05-08 15:13 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ("accounts", "0001_initial"), + ("lists", "0001_initial"), + ] + + operations = [ + migrations.AlterModelOptions( + name="listupvote", + options={"verbose_name_plural": "ListUpvotes"}, + ), + migrations.AlterUniqueTogether( + name="listupvote", + unique_together={("list", "account")}, + ), + ] diff --git a/lists/models.py b/lists/models.py index a3e93df..ad412d5 100644 --- a/lists/models.py +++ b/lists/models.py @@ -101,6 +101,11 @@ class ListUpvote(models.Model): help_text=_("Upvote creation date."), ) + class Meta: + verbose_name_plural = "List Upvotes" + + unique_together = (("list", "account"),) + class ListRegistration(models.Model): id 
= models.AutoField( diff --git a/pots/admin.py b/pots/admin.py index 24862fa..aca414b 100644 --- a/pots/admin.py +++ b/pots/admin.py @@ -1,37 +1,180 @@ +from django import forms from django.contrib import admin -from .models import PotFactory, Pot, PotApplication, PotPayout, PotPayoutChallenge, PotPayoutChallengeAdminResponse + +from accounts.models import Account + +from .models import ( + Pot, + PotApplication, + PotApplicationReview, + PotFactory, + PotPayout, + PotPayoutChallenge, + PotPayoutChallengeAdminResponse, +) + + +class PotFactoryForm(forms.ModelForm): + class Meta: + model = PotFactory + fields = "__all__" + + def __init__(self, *args, **kwargs): + super(PotFactoryForm, self).__init__(*args, **kwargs) + # Ensure self.instance is available before accessing it + if self.instance.pk: + # Set the queryset for the admins field to only include relevant accounts + self.fields["admins"].queryset = self.instance.admins.all() + # Set the queryset for the whitelisted_deployers field to only include relevant accounts + self.fields["whitelisted_deployers"].queryset = ( + self.instance.whitelisted_deployers.all() + ) + @admin.register(PotFactory) class PotFactoryAdmin(admin.ModelAdmin): - list_display = ('id', 'owner', 'deployed_at') - search_fields = ('id', 'owner__id') + form = PotFactoryForm + list_display = ("id", "owner", "deployed_at") + search_fields = ("id", "owner__id") + + def get_form(self, request, obj=None, **kwargs): + form = super(PotFactoryAdmin, self).get_form(request, obj, **kwargs) + if obj: + form.base_fields["admins"].queryset = obj.admins.all() + form.base_fields["whitelisted_deployers"].queryset = ( + obj.whitelisted_deployers.all() + ) + return form + + def has_add_permission(self, request): + return False + + def has_change_permission(self, request, obj=None): + return False + + def has_delete_permission(self, request, obj=None): + return False + + +class PotForm(forms.ModelForm): + class Meta: + model = Pot + fields = "__all__" + + def __init__(self, *args, **kwargs): + super(PotForm, self).__init__(*args, **kwargs) + # Ensure self.instance is available before accessing it + if self.instance.pk: + # Set the queryset for the admins field to only include relevant accounts + self.fields["admins"].queryset = self.instance.admins.all() + @admin.register(Pot) class PotAdmin(admin.ModelAdmin): - list_display = ('id', 'pot_factory', 'deployer', 'deployed_at', 'name') - search_fields = ('id', 'name', 'deployer__id') - list_filter = ('deployed_at',) + form = PotForm + list_display = ("id", "pot_factory", "deployer", "deployed_at", "name") + search_fields = ("id", "name", "deployer__id") + list_filter = ("deployed_at",) + + def get_form(self, request, obj=None, **kwargs): + form = super(PotAdmin, self).get_form(request, obj, **kwargs) + if obj: + form.base_fields["admins"].queryset = obj.admins.all() + return form + + def has_add_permission(self, request): + return False + + def has_change_permission(self, request, obj=None): + return False + + def has_delete_permission(self, request, obj=None): + return False + @admin.register(PotApplication) class PotApplicationAdmin(admin.ModelAdmin): - list_display = ('id', 'pot', 'applicant', 'status', 'submitted_at') - search_fields = ('pot__id', 'applicant__id') - list_filter = ('status', 'submitted_at') + list_display = ("id", "pot", "applicant", "status", "submitted_at") + search_fields = ("pot__id", "applicant__id") + list_filter = ("status", "submitted_at") + + def has_add_permission(self, request): + return False + + def 
has_change_permission(self, request, obj=None):
+        return False
+
+    def has_delete_permission(self, request, obj=None):
+        return False
+
+
+@admin.register(PotApplicationReview)
+class PotApplicationReviewAdmin(admin.ModelAdmin):
+    list_display = (
+        "id",
+        "application",
+        "reviewer",
+        "notes",
+        "status",
+        "reviewed_at",
+        "tx_hash",
+    )
+    search_fields = ("application__id", "reviewer__id")
+    list_filter = ("status", "reviewed_at")
+
+    def has_add_permission(self, request):
+        return False
+
+    def has_change_permission(self, request, obj=None):
+        return False
+
+    def has_delete_permission(self, request, obj=None):
+        return False
+

 @admin.register(PotPayout)
 class PotPayoutAdmin(admin.ModelAdmin):
-    list_display = ('id', 'pot', 'recipient', 'amount', 'paid_at')
-    search_fields = ('pot__id', 'recipient__id')
-    list_filter = ('paid_at',)
+    list_display = ("id", "pot", "recipient", "amount", "paid_at")
+    search_fields = ("pot__id", "recipient__id")
+    list_filter = ("paid_at",)
+
+    def has_add_permission(self, request):
+        return False
+
+    def has_change_permission(self, request, obj=None):
+        return False
+
+    def has_delete_permission(self, request, obj=None):
+        return False
+

 @admin.register(PotPayoutChallenge)
 class PotPayoutChallengeAdmin(admin.ModelAdmin):
-    list_display = ('id', 'challenger', 'pot', 'created_at')
-    search_fields = ('challenger__id', 'pot__id')
-    list_filter = ('created_at',)
+    list_display = ("id", "challenger", "pot", "created_at")
+    search_fields = ("challenger__id", "pot__id")
+    list_filter = ("created_at",)
+
+    def has_add_permission(self, request):
+        return False
+
+    def has_change_permission(self, request, obj=None):
+        return False
+
+    def has_delete_permission(self, request, obj=None):
+        return False
+

 @admin.register(PotPayoutChallengeAdminResponse)
 class PotPayoutChallengeAdminResponseAdmin(admin.ModelAdmin):
-    list_display = ('id', 'challenge', 'admin', 'created_at', 'resolved')
-    search_fields = ('admin__id', 'challenge__id')
-    list_filter = ('created_at', 'resolved')
+    list_display = ("id", "pot", "admin", "created_at", "resolved")
+    # the old "challenge" FK was replaced by challenger/pot fields (see the
+    # migration below), so search on challenger instead
+    search_fields = ("admin__id", "challenger__id")
+    list_filter = ("created_at", "resolved")
+
+    def has_add_permission(self, request):
+        return False
+
+    def has_change_permission(self, request, obj=None):
+        return False
+
+    def has_delete_permission(self, request, obj=None):
+        return False
diff --git a/pots/migrations/0002_alter_potapplication_options_and_more.py b/pots/migrations/0002_alter_potapplication_options_and_more.py
new file mode 100644
index 0000000..60f1eed
--- /dev/null
+++ b/pots/migrations/0002_alter_potapplication_options_and_more.py
@@ -0,0 +1,73 @@
+# Generated by Django 5.0.4 on 2024-05-09 12:44
+
+import django.db.models.deletion
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ("accounts", "0001_initial"),
+        ("pots", "0001_initial"),
+    ]
+
+    operations = [
+        migrations.AlterModelOptions(
+            name="potapplication",
+            options={"verbose_name_plural": "PotApplications"},
+        ),
+        migrations.AlterModelOptions(
+            name="potapplicationreview",
+            options={"verbose_name_plural": "PotApplicationReviews"},
+        ),
+        migrations.AlterModelOptions(
+            name="potpayoutchallenge",
+            options={"verbose_name_plural": "PayoutChallenges"},
+        ),
+        migrations.AlterModelOptions(
+            name="potpayoutchallengeadminresponse",
+            options={"verbose_name_plural": "PotPayoutChallengeResponses"},
+        ),
+        migrations.AddField(
+            model_name="potpayoutchallengeadminresponse",
+            name="challenger",
+            
field=models.ForeignKey( + help_text="challenger being responded to.", + null=True, + on_delete=django.db.models.deletion.CASCADE, + related_name="payout_admin_responses", + to="accounts.account", + ), + ), + migrations.AddField( + model_name="potpayoutchallengeadminresponse", + name="pot", + field=models.ForeignKey( + help_text="Pot being challenged.", + null=True, + on_delete=django.db.models.deletion.CASCADE, + related_name="payout_responses", + to="pots.pot", + ), + ), + migrations.AlterUniqueTogether( + name="potpayoutchallengeadminresponse", + unique_together={("challenger", "pot", "created_at")}, + ), + migrations.AlterUniqueTogether( + name="potapplication", + unique_together={("pot", "applicant")}, + ), + migrations.AlterUniqueTogether( + name="potapplicationreview", + unique_together={("application", "reviewer", "reviewed_at")}, + ), + migrations.AlterUniqueTogether( + name="potpayoutchallenge", + unique_together={("challenger", "pot")}, + ), + migrations.RemoveField( + model_name="potpayoutchallengeadminresponse", + name="challenge", + ), + ] diff --git a/pots/models.py b/pots/models.py index 8941bec..a495aea 100644 --- a/pots/models.py +++ b/pots/models.py @@ -57,6 +57,9 @@ class PotFactory(models.Model): help_text=_("Require whitelist."), ) + class Meta: + verbose_name_plural = "Pot Factories" + class Pot(models.Model): id = models.OneToOneField( @@ -332,6 +335,11 @@ class PotApplication(models.Model): help_text=_("Transaction hash."), ) + class Meta: + verbose_name_plural = "Pot Applications" + + unique_together = (("pot", "applicant"),) + class PotApplicationReview(models.Model): id = models.AutoField( @@ -378,6 +386,11 @@ class PotApplicationReview(models.Model): help_text=_("Transaction hash."), ) + class Meta: + verbose_name_plural = "Pot Application Reviews" + + unique_together = (("application", "reviewer", "reviewed_at"),) + class PotPayout(models.Model): id = models.AutoField( @@ -468,6 +481,11 @@ class PotPayoutChallenge(models.Model): help_text=_("Challenge message."), ) + class Meta: + verbose_name_plural = "Payout Challenges" + + unique_together = (("challenger", "pot"),) + class PotPayoutChallengeAdminResponse(models.Model): id = models.AutoField( @@ -475,13 +493,22 @@ class PotPayoutChallengeAdminResponse(models.Model): primary_key=True, help_text=_("Admin response id."), ) - challenge = models.ForeignKey( - PotPayoutChallenge, + challenger = models.ForeignKey( + Account, on_delete=models.CASCADE, - related_name="admin_responses", - null=False, - help_text=_("Challenge responded to."), + related_name="payout_admin_responses", + null=True, + help_text=_("challenger being responded to."), + ) + + pot = models.ForeignKey( + Pot, + on_delete=models.CASCADE, + related_name="payout_responses", + null=True, + help_text=_("Pot being challenged."), ) + admin = models.ForeignKey( Account, on_delete=models.CASCADE, @@ -511,3 +538,8 @@ class PotPayoutChallengeAdminResponse(models.Model): null=False, help_text=_("Transaction hash."), ) + + class Meta: + verbose_name_plural = "Payout Challenge Responses" + + unique_together = (("challenger", "pot", "created_at"),) diff --git a/pots/utils.py b/pots/utils.py index 30d4c14..0d98146 100644 --- a/pots/utils.py +++ b/pots/utils.py @@ -1,6 +1,21 @@ import re +from django.conf import settings -def match_pot_factory_version_pattern(receiver): - pattern = r"^v\d+\.potfactory\.potlock\.near$" +BASE_PATTERN = ( + r"^potlock\.testnet$" + if settings.ENVIRONMENT == "testnet" + else r"v\d+\.potfactory\.potlock\.near$" +) + + +def 
match_pot_factory_pattern(receiver):
+    """Matches the pot factory account itself (no subaccount), on mainnet or testnet."""
+    # BASE_PATTERN may carry its own leading "^" (testnet); strip it so the
+    # anchor isn't doubled
+    pattern = f"^{BASE_PATTERN.lstrip('^')}"
+    return bool(re.match(pattern, receiver))
+
+
+def match_pot_subaccount_pattern(receiver):
+    """Matches a pot deployed as a subaccount of the pot factory."""
+    # stripping the anchor matters here: an embedded "^" mid-pattern can never
+    # match, which would silently break subaccount matching on testnet.
+    # e.g. "mypot.v1.potfactory.potlock.near" -> True on mainnet
+    pattern = rf"^[a-zA-Z0-9_]+\.{BASE_PATTERN.lstrip('^')}"
     return bool(re.match(pattern, receiver))
diff --git a/pyproject.toml b/pyproject.toml
index 2dd543a..1bb5530 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -19,6 +19,7 @@ django-redis = "^5.4.0"
 gunicorn = "^22.0.0"
 sentry-sdk = {extras = ["django"], version = "^1.45.0"}
 watchtower = "^3.1.0"
+django-cors-headers = "^4.3.1"

 [tool.poetry.group.dev.dependencies]
 black = "^24.3.0"
diff --git a/scripts/after_install.sh b/scripts/after_install.sh
new file mode 100644
index 0000000..152ad3a
--- /dev/null
+++ b/scripts/after_install.sh
@@ -0,0 +1,67 @@
+#!/bin/bash
+# TODO: deprecate this (move to _dev & _testnet files)
+# Log output to a specific file
+LOG_FILE="/home/ec2-user/django-indexer/logs/deploy.log"
+
+echo -e "\n\n" >> "$LOG_FILE"
+echo "=========================================" >> "$LOG_FILE"
+echo "Running after_install.sh at $(date '+%Y-%m-%d %H:%M:%S')" >> "$LOG_FILE"
+echo "=========================================" >> "$LOG_FILE"
+
+# Load env vars
+source /home/ec2-user/.bashrc
+
+# Set correct ownership recursively for project directory
+sudo chown -R ec2-user:nginx /home/ec2-user/django-indexer/
+echo "$(date '+%Y-%m-%d %H:%M:%S') - Corrected ownership to ec2-user:nginx" >> "$LOG_FILE"
+
+# Set the necessary permissions
+sudo chmod -R 775 /home/ec2-user/django-indexer/
+echo "$(date '+%Y-%m-%d %H:%M:%S') - Set permissions to 775" >> "$LOG_FILE"
+
+# Restart nginx to apply any configuration changes
+sudo systemctl restart nginx
+echo "$(date '+%Y-%m-%d %H:%M:%S') - Restarted nginx" >> "$LOG_FILE"
+
+# Define the project directory
+PROJECT_DIR="/home/ec2-user/django-indexer"
+
+# Navigate to the project directory
+cd "$PROJECT_DIR"
+
+# Source the specific poetry virtual environment
+source "/home/ec2-user/.cache/pypoetry/virtualenvs/django-indexer-Y-SQFfhb-py3.11/bin/activate"
+
+# Install dependencies using Poetry
+echo "$(date '+%Y-%m-%d %H:%M:%S') - Installing dependencies with Poetry" >> "$LOG_FILE"
+poetry install >> "$LOG_FILE"
+echo "$(date '+%Y-%m-%d %H:%M:%S') - Dependencies installed" >> "$LOG_FILE"
+
+# Check if there are pending migrations and log the output
+echo "Checking for pending migrations..." >> "$LOG_FILE"
+PENDING_MIGRATIONS=$(poetry run python manage.py showmigrations | grep '\[ \]' 2>&1) # Redirect stderr to stdout
+echo "Migration check output: $PENDING_MIGRATIONS" >> "$LOG_FILE"
+
+# Log the full output of showmigrations
+echo "Checking for pending migrations..." >> "$LOG_FILE"
+poetry run python manage.py showmigrations >> "$LOG_FILE" 2>&1 # Logging full output to diagnose
+
+# Check for unapplied migrations
+PENDING_MIGRATIONS=$(poetry run python manage.py showmigrations | grep "\[ \]" | wc -l) # Count unapplied migrations
+
+if [ "$PENDING_MIGRATIONS" -gt 0 ]; then
+    echo "Migrations found; stopping services..." >> "$LOG_FILE"
+    sudo systemctl stop gunicorn celery-indexer-worker celery-beat-worker celery-beat
+
+    echo 'Applying migrations...' >> "$LOG_FILE"
+    poetry run python manage.py migrate >> "$LOG_FILE" 2>&1
+
+    echo 'Starting services...' >> "$LOG_FILE"
+    sudo systemctl start gunicorn celery-indexer-worker celery-beat-worker celery-beat
+else
+    echo 'No migrations found. Running collectstatic and restarting services...' >> "$LOG_FILE"
+    poetry run python manage.py collectstatic --noinput >> "$LOG_FILE" 2>&1
+    sudo systemctl restart gunicorn celery-indexer-worker celery-beat-worker celery-beat
+fi
+
+echo "$(date '+%Y-%m-%d %H:%M:%S') - after_install.sh completed" >> "$LOG_FILE"
\ No newline at end of file
diff --git a/scripts/after_install_dev.sh b/scripts/after_install_dev.sh
new file mode 100755
index 0000000..d9b54f1
--- /dev/null
+++ b/scripts/after_install_dev.sh
@@ -0,0 +1,67 @@
+#!/bin/bash
+# Log output to a specific file
+LOG_FILE="/home/ec2-user/django-indexer-dev/logs/deploy.log"
+
+echo -e "\n\n" >> "$LOG_FILE"
+echo "=========================================" >> "$LOG_FILE"
+echo "Running after_install_dev.sh at $(date '+%Y-%m-%d %H:%M:%S')" >> "$LOG_FILE"
+echo "=========================================" >> "$LOG_FILE"
+
+# Load env vars
+export PL_ENVIRONMENT=dev
+source /home/ec2-user/.bashrc
+
+# Set correct ownership recursively for project directory
+sudo chown -R ec2-user:nginx /home/ec2-user/django-indexer-dev/
+echo "$(date '+%Y-%m-%d %H:%M:%S') - Corrected ownership to ec2-user:nginx" >> "$LOG_FILE"
+
+# Set the necessary permissions
+sudo chmod -R 775 /home/ec2-user/django-indexer-dev/
+echo "$(date '+%Y-%m-%d %H:%M:%S') - Set permissions to 775" >> "$LOG_FILE"
+
+# Restart nginx to apply any configuration changes
+sudo systemctl restart nginx
+echo "$(date '+%Y-%m-%d %H:%M:%S') - Restarted nginx" >> "$LOG_FILE"
+
+# Define the project directory
+PROJECT_DIR="/home/ec2-user/django-indexer-dev"
+
+# Navigate to the project directory
+cd "$PROJECT_DIR"
+
+# Source the specific poetry virtual environment
+source "/home/ec2-user/.cache/pypoetry/virtualenvs/django-indexer-Y-SQFfhb-py3.11/bin/activate"
+
+# Install dependencies using Poetry
+echo "$(date '+%Y-%m-%d %H:%M:%S') - Installing dependencies with Poetry" >> "$LOG_FILE"
+poetry install >> "$LOG_FILE"
+echo "$(date '+%Y-%m-%d %H:%M:%S') - Dependencies installed" >> "$LOG_FILE"
+
+# Check if there are pending migrations and log the output
+echo "Checking for pending migrations..." >> "$LOG_FILE"
+PENDING_MIGRATIONS=$(poetry run python manage.py showmigrations | grep '\[ \]' 2>&1) # Redirect stderr to stdout
+echo "Migration check output: $PENDING_MIGRATIONS" >> "$LOG_FILE"
+
+# Log the full output of showmigrations
+echo "Checking for pending migrations..." >> "$LOG_FILE"
+poetry run python manage.py showmigrations >> "$LOG_FILE" 2>&1 # Logging full output to diagnose
+
+# Check for unapplied migrations
+PENDING_MIGRATIONS=$(poetry run python manage.py showmigrations | grep "\[ \]" | wc -l) # Count unapplied migrations
+
+if [ "$PENDING_MIGRATIONS" -gt 0 ]; then
+    echo "Migrations found; stopping services..." >> "$LOG_FILE"
+    sudo systemctl stop gunicorn-dev celery-indexer-worker-dev celery-beat-worker-dev celery-beat-dev
+
+    echo 'Applying migrations...' >> "$LOG_FILE"
+    poetry run python manage.py migrate >> "$LOG_FILE" 2>&1
+
+    echo 'Starting services...'
>> "$LOG_FILE" + sudo systemctl start gunicorn celery-indexer-worker celery-beat-worker celery-beat +else + echo 'No migrations found. Running collectstatic and restarting services...' >> "$LOG_FILE" + poetry run python manage.py collectstatic --noinput >> "$LOG_FILE" 2>&1 + sudo systemctl restart gunicorn celery-indexer-worker celery-beat-worker celery-beat +fi + +echo "$(date '+%Y-%m-%d %H:%M:%S') - after_install.sh completed" >> "$LOG_FILE" \ No newline at end of file diff --git a/scripts/after_install_dev.sh b/scripts/after_install_dev.sh new file mode 100755 index 0000000..d9b54f1 --- /dev/null +++ b/scripts/after_install_dev.sh @@ -0,0 +1,67 @@ +#!/bin/bash +# Log output to a specific file +LOG_FILE="/home/ec2-user/django-indexer-dev/logs/deploy.log" + +echo -e "\n\n" >> "$LOG_FILE" +echo "=========================================" >> "$LOG_FILE" +echo "Running after_install_dev.sh at $(date '+%Y-%m-%d %H:%M:%S')" >> "$LOG_FILE" +echo "=========================================" >> "$LOG_FILE" + +# Load env vars +export PL_ENVIRONMENT=dev +source /home/ec2-user/.bashrc + +# Set correct ownership recursively for project directory +sudo chown -R ec2-user:nginx /home/ec2-user/django-indexer-dev/ +echo "$(date '+%Y-%m-%d %H:%M:%S') - Corrected ownership to ec2-user:nginx" >> "$LOG_FILE" + +# Set the necessary permissions +sudo chmod -R 775 /home/ec2-user/django-indexer-dev/ +echo "$(date '+%Y-%m-%d %H:%M:%S') - Set permissions to 775" >> "$LOG_FILE" + +# Restart nginx to apply any configuration changes +sudo systemctl restart nginx +echo "$(date '+%Y-%m-%d %H:%M:%S') - Restarted nginx" >> "$LOG_FILE" + +# Define the project directory +PROJECT_DIR="/home/ec2-user/django-indexer-dev" + +# Navigate to the project directory +cd "$PROJECT_DIR" + +# Source the specific poetry virtual environment +source "/home/ec2-user/.cache/pypoetry/virtualenvs/django-indexer-Y-SQFfhb-py3.11/bin/activate" + +# Install dependencies using Poetry +echo "$(date '+%Y-%m-%d %H:%M:%S') - Installing dependencies with Poetry" >> "$LOG_FILE" +poetry install >> "$LOG_FILE" +echo "$(date '+%Y-%m-%d %H:%M:%S') - Dependencies installed" >> "$LOG_FILE" + +# Check if there are pending migrations and log the output +echo "Checking for pending migrations..." >> "$LOG_FILE" +PENDING_MIGRATIONS=$(poetry run python manage.py showmigrations | grep '\[ \]' 2>&1) # Redirect stderr to stdout +echo "Migration check output: $PENDING_MIGRATIONS" >> "$LOG_FILE" + +# Log the full output of showmigrations +echo "Checking for pending migrations..." >> "$LOG_FILE" +poetry run python manage.py showmigrations >> "$LOG_FILE" 2>&1 # Logging full output to diagnose + +# Check for unapplied migrations +PENDING_MIGRATIONS=$(poetry run python manage.py showmigrations | grep "\[ \]" | wc -l) # Count unapplied migrations + +if [ "$PENDING_MIGRATIONS" -gt 0 ]; then + echo "Migrations found; stopping services..." >> "$LOG_FILE" + sudo systemctl stop gunicorn-dev celery-indexer-worker-dev celery-beat-worker-dev celery-beat-dev + + echo 'Applying migrations...' >> "$LOG_FILE" + poetry run python manage.py migrate >> "$LOG_FILE" 2>&1 + + echo 'Starting services...' >> "$LOG_FILE" + sudo systemctl start gunicorn-dev celery-indexer-worker-dev celery-beat-worker-dev celery-beat-dev +else + echo 'No migrations found. Running collectstatic and restarting services...' 
>> "$LOG_FILE" + poetry run python manage.py collectstatic --noinput >> "$LOG_FILE" 2>&1 + sudo systemctl restart gunicorn-dev celery-indexer-worker-dev celery-beat-worker-dev celery-beat-dev +fi + +echo "$(date '+%Y-%m-%d %H:%M:%S') - after_install_dev.sh completed" >> "$LOG_FILE" diff --git a/scripts/after_install_testnet.sh b/scripts/after_install_testnet.sh new file mode 100755 index 0000000..94c517b --- /dev/null +++ b/scripts/after_install_testnet.sh @@ -0,0 +1,67 @@ +#!/bin/bash +# Log output to a specific file +LOG_FILE="/home/ec2-user/django-indexer-testnet/logs/deploy.log" + +echo -e "\n\n" >> "$LOG_FILE" +echo "=========================================" >> "$LOG_FILE" +echo "Running after_install_testnet.sh at $(date '+%Y-%m-%d %H:%M:%S')" >> "$LOG_FILE" +echo "=========================================" >> "$LOG_FILE" + +# Load env vars +export PL_ENVIRONMENT=testnet +source /home/ec2-user/.bashrc + +# Set correct ownership recursively for project directory +sudo chown -R ec2-user:nginx /home/ec2-user/django-indexer-testnet/ +echo "$(date '+%Y-%m-%d %H:%M:%S') - Corrected ownership to ec2-user:nginx" >> "$LOG_FILE" + +# Set the necessary permissions +sudo chmod -R 775 /home/ec2-user/django-indexer-testnet/ +echo "$(date '+%Y-%m-%d %H:%M:%S') - Set permissions to 775" >> "$LOG_FILE" + +# Restart nginx to apply any configuration changes +sudo systemctl restart nginx +echo "$(date '+%Y-%m-%d %H:%M:%S') - Restarted nginx" >> "$LOG_FILE" + +# Define the project directory +PROJECT_DIR="/home/ec2-user/django-indexer-testnet" + +# Navigate to the project directory +cd "$PROJECT_DIR" + +# Source the specific poetry virtual environment +source "/home/ec2-user/.cache/pypoetry/virtualenvs/django-indexer-AhfQkQzj-py3.11/bin/activate" + +# Install dependencies using Poetry +echo "$(date '+%Y-%m-%d %H:%M:%S') - Installing dependencies with Poetry" >> "$LOG_FILE" +poetry install >> "$LOG_FILE" +echo "$(date '+%Y-%m-%d %H:%M:%S') - Dependencies installed" >> "$LOG_FILE" + +# Check if there are pending migrations and log the output +echo "Checking for pending migrations..." >> "$LOG_FILE" +PENDING_MIGRATIONS=$(poetry run python manage.py showmigrations | grep '\[ \]' 2>&1) # Redirect stderr to stdout +echo "Migration check output: $PENDING_MIGRATIONS" >> "$LOG_FILE" + +# Log the full output of showmigrations +echo "Checking for pending migrations..." >> "$LOG_FILE" +poetry run python manage.py showmigrations >> "$LOG_FILE" 2>&1 # Logging full output to diagnose + +# Check for unapplied migrations +PENDING_MIGRATIONS=$(poetry run python manage.py showmigrations | grep "\[ \]" | wc -l) # Count unapplied migrations + +if [ "$PENDING_MIGRATIONS" -gt 0 ]; then + echo "Migrations found; stopping services..." >> "$LOG_FILE" + sudo systemctl stop gunicorn-testnet celery-indexer-worker-testnet celery-beat-worker-testnet celery-beat-testnet + + echo 'Applying migrations...' >> "$LOG_FILE" + poetry run python manage.py migrate >> "$LOG_FILE" 2>&1 + + echo 'Starting services...' >> "$LOG_FILE" + sudo systemctl start gunicorn-testnet celery-indexer-worker-testnet celery-beat-worker-testnet celery-beat-testnet +else + echo 'No migrations found. Running collectstatic and restarting services...' 
>> "$LOG_FILE" + poetry run python manage.py collectstatic --noinput >> "$LOG_FILE" 2>&1 + sudo systemctl restart gunicorn-testnet celery-indexer-worker-testnet celery-beat-worker-testnet celery-beat-testnet +fi + +echo "$(date '+%Y-%m-%d %H:%M:%S') - after_install_testnet.sh completed" >> "$LOG_FILE" diff --git a/scripts/clean_destination_dev.sh b/scripts/clean_destination_dev.sh new file mode 100644 index 0000000..638c035 --- /dev/null +++ b/scripts/clean_destination_dev.sh @@ -0,0 +1,8 @@ +#!/bin/bash +# Directory to clean +DEST_DIR="/home/ec2-user/django-indexer-dev" + +# Delete all contents of the destination directory +if [ -d "$DEST_DIR" ]; then + rm -rf "${DEST_DIR:?}/*" +fi \ No newline at end of file diff --git a/scripts/clean_destination_testnet.sh b/scripts/clean_destination_testnet.sh new file mode 100644 index 0000000..62b020a --- /dev/null +++ b/scripts/clean_destination_testnet.sh @@ -0,0 +1,8 @@ +#!/bin/bash +# Directory to clean +DEST_DIR="/home/ec2-user/django-indexer-testnet" + +# Delete all contents of the destination directory +if [ -d "$DEST_DIR" ]; then + rm -rf "${DEST_DIR:?}/*" +fi \ No newline at end of file diff --git a/tokens/admin.py b/tokens/admin.py index 0ca45d6..f783399 100644 --- a/tokens/admin.py +++ b/tokens/admin.py @@ -1,18 +1,40 @@ from django.contrib import admin + from .models import Token, TokenHistoricalPrice + @admin.register(Token) class TokenAdmin(admin.ModelAdmin): - list_display = ('id', 'decimals', 'get_most_recent_price') - search_fields = ('id',) + list_display = ("id", "decimals", "get_most_recent_price") + search_fields = ("id",) def get_most_recent_price(self, obj): price = obj.get_most_recent_price() return price.price_usd if price else None - get_most_recent_price.short_description = 'Most Recent Price (USD)' + + get_most_recent_price.short_description = "Most Recent Price (USD)" + + def has_add_permission(self, request): + return False + + def has_change_permission(self, request, obj=None): + return False + + def has_delete_permission(self, request, obj=None): + return False + @admin.register(TokenHistoricalPrice) class TokenHistoricalPriceAdmin(admin.ModelAdmin): - list_display = ('token', 'timestamp', 'price_usd') - search_fields = ('token__id',) - list_filter = ('timestamp',) + list_display = ("token", "timestamp", "price_usd") + search_fields = ("token__id",) + list_filter = ("timestamp",) + + def has_add_permission(self, request): + return False + + def has_change_permission(self, request, obj=None): + return False + + def has_delete_permission(self, request, obj=None): + return False diff --git a/tokens/models.py b/tokens/models.py index aa4a309..1b4d071 100644 --- a/tokens/models.py +++ b/tokens/models.py @@ -1,3 +1,5 @@ +from decimal import Decimal + from django.db import models from django.utils import timezone from django.utils.translation import gettext_lazy as _ @@ -21,6 +23,11 @@ class Token(models.Model): def get_most_recent_price(self): return self.historical_prices.order_by("-timestamp").first() + def format_price(self, amount_str: str): + # Convert the string amount to a Decimal, then adjust by the token's decimal places + formatted_amount = Decimal(amount_str) / (Decimal("10") ** self.decimals) + return formatted_amount + class TokenHistoricalPrice(models.Model): token = models.ForeignKey(