From 7928088ab4fbacee254dbab80c35b9f86b5dbc87 Mon Sep 17 00:00:00 2001
From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com>
Date: Mon, 29 Apr 2024 10:59:02 -0400
Subject: [PATCH 001/127] add deploy workflow

---
 .github/ISSUE_TEMPLATE/workflows/deploy.yml | 66 +++++++++++++++++++++
 1 file changed, 66 insertions(+)
 create mode 100644 .github/ISSUE_TEMPLATE/workflows/deploy.yml

diff --git a/.github/ISSUE_TEMPLATE/workflows/deploy.yml b/.github/ISSUE_TEMPLATE/workflows/deploy.yml
new file mode 100644
index 0000000..6c57370
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/workflows/deploy.yml
@@ -0,0 +1,66 @@
+name: Deploy to EC2 on Push
+
+on:
+  push:
+    branches:
+      - main
+      - dev
+
+jobs:
+  deploy:
+    runs-on: ubuntu-latest
+    environment:
+      name: ${{ github.ref == 'refs/heads/main' && 'prod' || 'dev' }}
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v2
+
+      - name: Set up SSH key
+        uses: webfactory/ssh-agent@v0.5.3
+        with:
+          ssh-private-key: ${{ secrets.EC2_SSH_PRIVATE_KEY }}
+
+      - name: Push new code to EC2
+        run: |
+          rsync -avz --exclude '.git*' --exclude 'node_modules' ./ ${{ env.EC2_USER }}@${{ env.EC2_SSH_HOST }}:/path/to/your/project/
+          echo "Code has been pushed to the EC2 instance."
+
+      - name: Check for pending migrations
+        id: check_migrations
+        run: |
+          echo "Checking for pending migrations..."
+          pending_migrations=$(ssh -o "StrictHostKeyChecking=no" ${{ env.EC2_USER }}@${{ env.EC2_SSH_HOST }} "cd /path/to/your/project/ && source env/bin/activate && python manage.py showmigrations --plan | grep '\[ \]'")
+          echo "::set-output name=pending::${pending_migrations}"
+          if [ -z "$pending_migrations" ]; then
+            echo "No migrations found."
+          else
+            echo "Migrations found, stopping services."; fi
+
+      - name: Stop services if migrations are pending
+        if: steps.check_migrations.outputs.pending
+        run: |
+          echo "Stopping services..."
+          ssh -o "StrictHostKeyChecking=no" ${{ env.EC2_USER }}@${{ env.EC2_SSH_HOST }} "sudo systemctl stop gunicorn.service indexer.service"
+
+      - name: Run migrations
+        if: steps.check_migrations.outputs.pending
+        run: |
+          echo "Running migrations..."
+          ssh -o "StrictHostKeyChecking=no" ${{ env.EC2_USER }}@${{ env.EC2_SSH_HOST }} "cd /path/to/your/project/ && source env/bin/activate && python manage.py migrate"
+
+      - name: Run collectstatic
+        run: |
+          echo "Running collectstatic..."
+          ssh -o "StrictHostKeyChecking=no" ${{ env.EC2_USER }}@${{ env.EC2_SSH_HOST }} "cd /path/to/your/project/ && source env/bin/activate && python manage.py collectstatic --noinput"
+
+      - name: Restart services if migrations were run
+        if: steps.check_migrations.outputs.pending
+        run: |
+          echo "Restarting services after migrations..."
+          ssh -o "StrictHostKeyChecking=no" ${{ env.EC2_USER }}@${{ env.EC2_SSH_HOST }} "sudo systemctl restart gunicorn.service indexer.service"
+
+      - name: Restart services if no migrations
+        if: steps.check_migrations.outputs.pending == ''
+        run: |
+          echo "Restarting services without migration..."
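          # (aside, not part of the commit) Step outputs are plain strings, so the
          # `if: steps.check_migrations.outputs.pending` guards fire exactly when the
          # captured grep output is non-empty, and this step's `== ''` check is their
          # complement; note that this no-migration path restarts only
          # gunicorn.service, leaving indexer.service untouched.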
+ ssh -o "StrictHostKeyChecking=no" ${{ env.EC2_USER }}@${{ env.EC2_SSH_HOST }} "sudo systemctl restart gunicorn.service" From c7569ec121cd90968ce8c3303237c27a22998615 Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Mon, 29 Apr 2024 10:59:59 -0400 Subject: [PATCH 002/127] fix workflows location --- .github/{ISSUE_TEMPLATE => }/workflows/deploy.yml | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename .github/{ISSUE_TEMPLATE => }/workflows/deploy.yml (100%) diff --git a/.github/ISSUE_TEMPLATE/workflows/deploy.yml b/.github/workflows/deploy.yml similarity index 100% rename from .github/ISSUE_TEMPLATE/workflows/deploy.yml rename to .github/workflows/deploy.yml From 1087fd50e4483eb443f205775cdb70ddb12d57db Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Mon, 29 Apr 2024 13:16:51 -0400 Subject: [PATCH 003/127] fix ec2 directory path --- .github/workflows/deploy.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/deploy.yml b/.github/workflows/deploy.yml index 6c57370..5c36140 100644 --- a/.github/workflows/deploy.yml +++ b/.github/workflows/deploy.yml @@ -22,14 +22,14 @@ jobs: - name: Push new code to EC2 run: | - rsync -avz --exclude '.git*' --exclude 'node_modules' ./ ${{ env.EC2_USER }}@${{ env.EC2_SSH_HOST }}:/path/to/your/project/ + rsync -avz --exclude '.git*' --exclude 'node_modules' ./ ${{ env.EC2_USER }}@${{ env.EC2_SSH_HOST }}:/home/ec2-user/django-indexer echo "Code has been pushed to the EC2 instance." - name: Check for pending migrations id: check_migrations run: | echo "Checking for pending migrations..." - pending_migrations=$(ssh -o "StrictHostKeyChecking=no" ${{ env.EC2_USER }}@${{ env.EC2_SSH_HOST }} "cd /path/to/your/project/ && source env/bin/activate && python manage.py showmigrations --plan | grep '\[ \]'") + pending_migrations=$(ssh -o "StrictHostKeyChecking=no" ${{ env.EC2_USER }}@${{ env.EC2_SSH_HOST }} "cd /home/ec2-user/django-indexer && source env/bin/activate && python manage.py showmigrations --plan | grep '\[ \]'") echo "::set-output name=pending::${pending_migrations}" if [ -z "$pending_migrations" ]; then echo "No migrations found." @@ -46,12 +46,12 @@ jobs: if: steps.check_migrations.outputs.pending run: | echo "Running migrations..." - ssh -o "StrictHostKeyChecking=no" ${{ env.EC2_USER }}@${{ env.EC2_SSH_HOST }} "cd /path/to/your/project/ && source env/bin/activate && python manage.py migrate" + ssh -o "StrictHostKeyChecking=no" ${{ env.EC2_USER }}@${{ env.EC2_SSH_HOST }} "cd /home/ec2-user/django-indexer && source env/bin/activate && python manage.py migrate" - name: Run collectstatic run: | echo "Running collectstatic..." 
- ssh -o "StrictHostKeyChecking=no" ${{ env.EC2_USER }}@${{ env.EC2_SSH_HOST }} "cd /path/to/your/project/ && source env/bin/activate && python manage.py collectstatic --noinput" + ssh -o "StrictHostKeyChecking=no" ${{ env.EC2_USER }}@${{ env.EC2_SSH_HOST }} "cd /home/ec2-user/django-indexer && source env/bin/activate && python manage.py collectstatic --noinput" - name: Restart services if migrations were run if: steps.check_migrations.outputs.pending From 4bd0940fd5283fda287c309b896035e769a6086a Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Mon, 29 Apr 2024 13:24:20 -0400 Subject: [PATCH 004/127] add workflow debug logs --- .github/workflows/deploy.yml | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/.github/workflows/deploy.yml b/.github/workflows/deploy.yml index 5c36140..d344b78 100644 --- a/.github/workflows/deploy.yml +++ b/.github/workflows/deploy.yml @@ -20,6 +20,11 @@ jobs: with: ssh-private-key: ${{ secrets.EC2_SSH_PRIVATE_KEY }} + - name: Print environment variables + run: | + echo "EC2_SSH_HOST=${{ env.EC2_SSH_HOST }}" + echo "EC2_USER=${{ env.EC2_USER }}" + - name: Push new code to EC2 run: | rsync -avz --exclude '.git*' --exclude 'node_modules' ./ ${{ env.EC2_USER }}@${{ env.EC2_SSH_HOST }}:/home/ec2-user/django-indexer From 648e5b834f44abd158634c6a61718045fed480cf Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Mon, 29 Apr 2024 16:02:28 -0400 Subject: [PATCH 005/127] change env. to vars. --- .github/workflows/deploy.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/deploy.yml b/.github/workflows/deploy.yml index d344b78..6062239 100644 --- a/.github/workflows/deploy.yml +++ b/.github/workflows/deploy.yml @@ -22,8 +22,8 @@ jobs: - name: Print environment variables run: | - echo "EC2_SSH_HOST=${{ env.EC2_SSH_HOST }}" - echo "EC2_USER=${{ env.EC2_USER }}" + echo "EC2_SSH_HOST=${{ vars.EC2_SSH_HOST }}" + echo "EC2_USER=${{ vars.EC2_USER }}" - name: Push new code to EC2 run: | From 5f6c725e47f4ac0f47a8a991dcf55552cc6c8978 Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Mon, 29 Apr 2024 16:03:15 -0400 Subject: [PATCH 006/127] change all env. to vars. --- .github/workflows/deploy.yml | 19 +++++++------------ 1 file changed, 7 insertions(+), 12 deletions(-) diff --git a/.github/workflows/deploy.yml b/.github/workflows/deploy.yml index 6062239..593040c 100644 --- a/.github/workflows/deploy.yml +++ b/.github/workflows/deploy.yml @@ -20,21 +20,16 @@ jobs: with: ssh-private-key: ${{ secrets.EC2_SSH_PRIVATE_KEY }} - - name: Print environment variables - run: | - echo "EC2_SSH_HOST=${{ vars.EC2_SSH_HOST }}" - echo "EC2_USER=${{ vars.EC2_USER }}" - - name: Push new code to EC2 run: | - rsync -avz --exclude '.git*' --exclude 'node_modules' ./ ${{ env.EC2_USER }}@${{ env.EC2_SSH_HOST }}:/home/ec2-user/django-indexer + rsync -avz --exclude '.git*' --exclude 'node_modules' ./ ${{ vars.EC2_USER }}@${{ vars.EC2_SSH_HOST }}:/home/ec2-user/django-indexer echo "Code has been pushed to the EC2 instance." - name: Check for pending migrations id: check_migrations run: | echo "Checking for pending migrations..." 
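          # (annotation, not part of the commit) Under the runner's default `bash -e`,
          # the assignment below exits non-zero whenever the remote grep matches no
          # pending '[ ]' migrations, failing the step before the "no migrations"
          # branch can run; one hedged fix is `pending_migrations=$(ssh ... || true)`.
          # `::set-output` is also deprecated; for single-line values the modern form
          # is `echo "pending=${pending_migrations}" >> "$GITHUB_OUTPUT"`.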
- pending_migrations=$(ssh -o "StrictHostKeyChecking=no" ${{ env.EC2_USER }}@${{ env.EC2_SSH_HOST }} "cd /home/ec2-user/django-indexer && source env/bin/activate && python manage.py showmigrations --plan | grep '\[ \]'") + pending_migrations=$(ssh -o "StrictHostKeyChecking=no" ${{ vars.EC2_USER }}@${{ vars.EC2_SSH_HOST }} "cd /home/ec2-user/django-indexer && source env/bin/activate && python manage.py showmigrations --plan | grep '\[ \]'") echo "::set-output name=pending::${pending_migrations}" if [ -z "$pending_migrations" ]; then echo "No migrations found." @@ -45,27 +40,27 @@ jobs: if: steps.check_migrations.outputs.pending run: | echo "Stopping services..." - ssh -o "StrictHostKeyChecking=no" ${{ env.EC2_USER }}@${{ env.EC2_SSH_HOST }} "sudo systemctl stop gunicorn.service indexer.service" + ssh -o "StrictHostKeyChecking=no" ${{ vars.EC2_USER }}@${{ vars.EC2_SSH_HOST }} "sudo systemctl stop gunicorn.service indexer.service" - name: Run migrations if: steps.check_migrations.outputs.pending run: | echo "Running migrations..." - ssh -o "StrictHostKeyChecking=no" ${{ env.EC2_USER }}@${{ env.EC2_SSH_HOST }} "cd /home/ec2-user/django-indexer && source env/bin/activate && python manage.py migrate" + ssh -o "StrictHostKeyChecking=no" ${{ vars.EC2_USER }}@${{ vars.EC2_SSH_HOST }} "cd /home/ec2-user/django-indexer && source env/bin/activate && python manage.py migrate" - name: Run collectstatic run: | echo "Running collectstatic..." - ssh -o "StrictHostKeyChecking=no" ${{ env.EC2_USER }}@${{ env.EC2_SSH_HOST }} "cd /home/ec2-user/django-indexer && source env/bin/activate && python manage.py collectstatic --noinput" + ssh -o "StrictHostKeyChecking=no" ${{ vars.EC2_USER }}@${{ vars.EC2_SSH_HOST }} "cd /home/ec2-user/django-indexer && source env/bin/activate && python manage.py collectstatic --noinput" - name: Restart services if migrations were run if: steps.check_migrations.outputs.pending run: | echo "Restarting services after migrations..." - ssh -o "StrictHostKeyChecking=no" ${{ env.EC2_USER }}@${{ env.EC2_SSH_HOST }} "sudo systemctl restart gunicorn.service indexer.service" + ssh -o "StrictHostKeyChecking=no" ${{ vars.EC2_USER }}@${{ vars.EC2_SSH_HOST }} "sudo systemctl restart gunicorn.service indexer.service" - name: Restart services if no migrations if: steps.check_migrations.outputs.pending == '' run: | echo "Restarting services without migration..." 
- ssh -o "StrictHostKeyChecking=no" ${{ env.EC2_USER }}@${{ env.EC2_SSH_HOST }} "sudo systemctl restart gunicorn.service" + ssh -o "StrictHostKeyChecking=no" ${{ vars.EC2_USER }}@${{ vars.EC2_SSH_HOST }} "sudo systemctl restart gunicorn.service" From 152cb35ba26d064bdaf165d004202465c4305583 Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Mon, 29 Apr 2024 17:13:08 -0400 Subject: [PATCH 007/127] add appspec --- appspec.yml | 16 ++++++++++++++++ scripts/after_install.sh | 10 ++++++++++ 2 files changed, 26 insertions(+) create mode 100644 appspec.yml create mode 100644 scripts/after_install.sh diff --git a/appspec.yml b/appspec.yml new file mode 100644 index 0000000..76a51b2 --- /dev/null +++ b/appspec.yml @@ -0,0 +1,16 @@ +version: 0.0 +os: linux +files: + - source: / + destination: /home/ec2-user/django-indexer +# hooks: +# # Install: +# AfterInstall: +# - location: scripts/after_install.sh +# timeout: 300 +# runas: root +# ApplicationStart: +# - location: scripts/application_start.sh +# timeout: 300 +# runas: root +# # ValidateService: diff --git a/scripts/after_install.sh b/scripts/after_install.sh new file mode 100644 index 0000000..2ebca6a --- /dev/null +++ b/scripts/after_install.sh @@ -0,0 +1,10 @@ +#!/bin/bash +echo 'run after_install.sh' >> /home/ec2-user/myrepo/deploy.log + +cd /home/ec2-user/django-indexer + +echo 'poetry install' >> /home/ec2-user/myrepo/deploy.log + +poetry shell + +poetry install >> /home/ec2-user/myrepo/deploy.log \ No newline at end of file From f740c640b1b60269966753a952f08ab276f82e35 Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Tue, 30 Apr 2024 09:46:28 -0400 Subject: [PATCH 008/127] update workflow to use codedeploy --- .github/workflows/deploy.yml | 139 +++++++++++++++++++++++------------ 1 file changed, 90 insertions(+), 49 deletions(-) diff --git a/.github/workflows/deploy.yml b/.github/workflows/deploy.yml index 593040c..9c958fe 100644 --- a/.github/workflows/deploy.yml +++ b/.github/workflows/deploy.yml @@ -1,66 +1,107 @@ +# This is a basic workflow to help you get started with Actions name: Deploy to EC2 on Push on: push: - branches: - - main - - dev + branches: [dev] +env: + AWS_REGION: "us-east-1" + +# Permission can be added at job level or workflow level +permissions: + id-token: write # This is required for requesting the JWT + contents: read # This is required for actions/checkout jobs: - deploy: + AssumeRoleAndCallIdentity: runs-on: ubuntu-latest - environment: - name: ${{ github.ref == 'refs/heads/main' && 'prod' || 'dev' }} steps: - - name: Checkout code - uses: actions/checkout@v2 - - - name: Set up SSH key - uses: webfactory/ssh-agent@v0.5.3 + - name: Git clone the repository + uses: actions/checkout@v3 + - name: configure aws credentials + uses: aws-actions/configure-aws-credentials@v1.7.0 with: - ssh-private-key: ${{ secrets.EC2_SSH_PRIVATE_KEY }} - - - name: Push new code to EC2 + role-to-assume: arn:aws:iam::471112976510:role/GitHubAction-AssumeRoleWithAction + role-session-name: GitHub_to_AWS_via_FederatedOIDC + aws-region: ${{ env.AWS_REGION }} + # Hello from AWS: WhoAmI + - name: Sts GetCallerIdentity run: | - rsync -avz --exclude '.git*' --exclude 'node_modules' ./ ${{ vars.EC2_USER }}@${{ vars.EC2_SSH_HOST }}:/home/ec2-user/django-indexer - echo "Code has been pushed to the EC2 instance." 
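        # (note, not part of the commit) This smoke-tests the OIDC exchange: GitHub
        # mints a short-lived JWT (hence `id-token: write` above), which
        # configure-aws-credentials trades for temporary credentials via
        # sts:AssumeRoleWithWebIdentity, so the call below should print the
        # assumed-role ARN rather than any long-lived IAM user.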
+ aws sts get-caller-identity - - name: Check for pending migrations - id: check_migrations + # Step 3 - check the application-name and deployment group name + - name: Create CodeDeploy Deployment + id: deploy run: | - echo "Checking for pending migrations..." - pending_migrations=$(ssh -o "StrictHostKeyChecking=no" ${{ vars.EC2_USER }}@${{ vars.EC2_SSH_HOST }} "cd /home/ec2-user/django-indexer && source env/bin/activate && python manage.py showmigrations --plan | grep '\[ \]'") - echo "::set-output name=pending::${pending_migrations}" - if [ -z "$pending_migrations" ]; then - echo "No migrations found." - else: - echo "Migrations found, stopping services." + aws deploy create-deployment \ + --application-name django-indexer \ + --deployment-group-name django-indexer-dev \ + --deployment-config-name CodeDeployDefault.AllAtOnce \ + --github-location repository=${{ github.repository }},commitId=${{ github.sha }} - - name: Stop services if migrations are pending - if: steps.check_migrations.outputs.pending - run: | - echo "Stopping services..." - ssh -o "StrictHostKeyChecking=no" ${{ vars.EC2_USER }}@${{ vars.EC2_SSH_HOST }} "sudo systemctl stop gunicorn.service indexer.service" +# name: Deploy to EC2 on Push - - name: Run migrations - if: steps.check_migrations.outputs.pending - run: | - echo "Running migrations..." - ssh -o "StrictHostKeyChecking=no" ${{ vars.EC2_USER }}@${{ vars.EC2_SSH_HOST }} "cd /home/ec2-user/django-indexer && source env/bin/activate && python manage.py migrate" +# on: +# push: +# branches: +# - main +# - dev - - name: Run collectstatic - run: | - echo "Running collectstatic..." - ssh -o "StrictHostKeyChecking=no" ${{ vars.EC2_USER }}@${{ vars.EC2_SSH_HOST }} "cd /home/ec2-user/django-indexer && source env/bin/activate && python manage.py collectstatic --noinput" +# jobs: +# deploy: +# runs-on: ubuntu-latest +# environment: +# name: ${{ github.ref == 'refs/heads/main' && 'prod' || 'dev' }} +# steps: +# - name: Checkout code +# uses: actions/checkout@v2 - - name: Restart services if migrations were run - if: steps.check_migrations.outputs.pending - run: | - echo "Restarting services after migrations..." - ssh -o "StrictHostKeyChecking=no" ${{ vars.EC2_USER }}@${{ vars.EC2_SSH_HOST }} "sudo systemctl restart gunicorn.service indexer.service" +# - name: Set up SSH key +# uses: webfactory/ssh-agent@v0.5.3 +# with: +# ssh-private-key: ${{ secrets.EC2_SSH_PRIVATE_KEY }} - - name: Restart services if no migrations - if: steps.check_migrations.outputs.pending == '' - run: | - echo "Restarting services without migration..." - ssh -o "StrictHostKeyChecking=no" ${{ vars.EC2_USER }}@${{ vars.EC2_SSH_HOST }} "sudo systemctl restart gunicorn.service" +# - name: Push new code to EC2 +# run: | +# rsync -avz --exclude '.git*' --exclude 'node_modules' ./ ${{ vars.EC2_USER }}@${{ vars.EC2_SSH_HOST }}:/home/ec2-user/django-indexer +# echo "Code has been pushed to the EC2 instance." + +# - name: Check for pending migrations +# id: check_migrations +# run: | +# echo "Checking for pending migrations..." +# pending_migrations=$(ssh -o "StrictHostKeyChecking=no" ${{ vars.EC2_USER }}@${{ vars.EC2_SSH_HOST }} "cd /home/ec2-user/django-indexer && source env/bin/activate && python manage.py showmigrations --plan | grep '\[ \]'") +# echo "::set-output name=pending::${pending_migrations}" +# if [ -z "$pending_migrations" ]; then +# echo "No migrations found." +# else: +# echo "Migrations found, stopping services." 
+ +# - name: Stop services if migrations are pending +# if: steps.check_migrations.outputs.pending +# run: | +# echo "Stopping services..." +# ssh -o "StrictHostKeyChecking=no" ${{ vars.EC2_USER }}@${{ vars.EC2_SSH_HOST }} "sudo systemctl stop gunicorn.service indexer.service" + +# - name: Run migrations +# if: steps.check_migrations.outputs.pending +# run: | +# echo "Running migrations..." +# ssh -o "StrictHostKeyChecking=no" ${{ vars.EC2_USER }}@${{ vars.EC2_SSH_HOST }} "cd /home/ec2-user/django-indexer && source env/bin/activate && python manage.py migrate" + +# - name: Run collectstatic +# run: | +# echo "Running collectstatic..." +# ssh -o "StrictHostKeyChecking=no" ${{ vars.EC2_USER }}@${{ vars.EC2_SSH_HOST }} "cd /home/ec2-user/django-indexer && source env/bin/activate && python manage.py collectstatic --noinput" + +# - name: Restart services if migrations were run +# if: steps.check_migrations.outputs.pending +# run: | +# echo "Restarting services after migrations..." +# ssh -o "StrictHostKeyChecking=no" ${{ vars.EC2_USER }}@${{ vars.EC2_SSH_HOST }} "sudo systemctl restart gunicorn.service indexer.service" + +# - name: Restart services if no migrations +# if: steps.check_migrations.outputs.pending == '' +# run: | +# echo "Restarting services without migration..." +# ssh -o "StrictHostKeyChecking=no" ${{ vars.EC2_USER }}@${{ vars.EC2_SSH_HOST }} "sudo systemctl restart gunicorn.service" From 757f774f4418f234f68dd8cc7d6a71a096786760 Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Tue, 30 Apr 2024 09:50:59 -0400 Subject: [PATCH 009/127] remove whitespace in deploy workflow --- .github/workflows/deploy.yml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/.github/workflows/deploy.yml b/.github/workflows/deploy.yml index 9c958fe..554fb7f 100644 --- a/.github/workflows/deploy.yml +++ b/.github/workflows/deploy.yml @@ -1,4 +1,3 @@ -# This is a basic workflow to help you get started with Actions name: Deploy to EC2 on Push on: @@ -34,7 +33,7 @@ jobs: id: deploy run: | aws deploy create-deployment \ - --application-name django-indexer \ + --application-name django-indexer \ --deployment-group-name django-indexer-dev \ --deployment-config-name CodeDeployDefault.AllAtOnce \ --github-location repository=${{ github.repository }},commitId=${{ github.sha }} From 5eff58c08ed4a02306a3030cdd68386fae84cc78 Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Tue, 30 Apr 2024 10:22:22 -0400 Subject: [PATCH 010/127] update after_install script --- appspec.yml | 12 ++++++------ scripts/after_install.sh | 32 +++++++++++++++++++++++++++----- 2 files changed, 33 insertions(+), 11 deletions(-) diff --git a/appspec.yml b/appspec.yml index 76a51b2..b6fb084 100644 --- a/appspec.yml +++ b/appspec.yml @@ -3,12 +3,12 @@ os: linux files: - source: / destination: /home/ec2-user/django-indexer -# hooks: -# # Install: -# AfterInstall: -# - location: scripts/after_install.sh -# timeout: 300 -# runas: root +hooks: + # # Install: + AfterInstall: + - location: scripts/after_install.sh + timeout: 300 + runas: ec2-user # ApplicationStart: # - location: scripts/application_start.sh # timeout: 300 diff --git a/scripts/after_install.sh b/scripts/after_install.sh index 2ebca6a..d3fa518 100644 --- a/scripts/after_install.sh +++ b/scripts/after_install.sh @@ -1,10 +1,32 @@ #!/bin/bash -echo 'run after_install.sh' >> /home/ec2-user/myrepo/deploy.log +# Log output to a specific file 
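# (annotation, not part of the commit) Nothing in this revision creates the logs/
# directory, so on a fresh instance every append below fails unless the repository
# already ships it; a guard such as `mkdir -p "$(dirname "$LOG_FILE")"`, assuming
# the deploy user may create it, would make the logging self-sufficient.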
+LOG_FILE="/home/ec2-user/django-indexer/logs/deploy.log" -cd /home/ec2-user/django-indexer +echo 'Running after_install.sh' >> "$LOG_FILE" -echo 'poetry install' >> /home/ec2-user/myrepo/deploy.log +# Define the project directory +PROJECT_DIR="/home/ec2-user/django-indexer" -poetry shell +# Navigate to the project directory +cd "$PROJECT_DIR" -poetry install >> /home/ec2-user/myrepo/deploy.log \ No newline at end of file +# Source the specific poetry virtual environment +source "/home/ec2-user/.cache/pypoetry/virtualenvs/django-indexer-Y-SQFfhb-py3.11/bin/activate" + +# Check if there are pending migrations +if python manage.py showmigrations | grep '\[ \]'; then + echo 'Migrations found, stopping services...' >> "$LOG_FILE" + sudo systemctl stop gunicorn.service celery.service + + echo 'Applying migrations...' >> "$LOG_FILE" + python manage.py migrate >> "$LOG_FILE" + + echo 'Starting services...' >> "$LOG_FILE" + sudo systemctl start gunicorn.service celery.service +else + echo 'No migrations found. Running collectstatic and restarting services...' >> "$LOG_FILE" + python manage.py collectstatic --noinput >> "$LOG_FILE" + sudo systemctl restart gunicorn.service celery.service +fi + +echo 'after_install.sh completed' >> "$LOG_FILE" From 160af1a401018c78728971677641e2c4a064c018 Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Tue, 30 Apr 2024 10:36:40 -0400 Subject: [PATCH 011/127] set correct file permissions in after_install.sh --- scripts/after_install.sh | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) mode change 100644 => 100755 scripts/after_install.sh diff --git a/scripts/after_install.sh b/scripts/after_install.sh old mode 100644 new mode 100755 index d3fa518..32cbe2a --- a/scripts/after_install.sh +++ b/scripts/after_install.sh @@ -15,7 +15,7 @@ source "/home/ec2-user/.cache/pypoetry/virtualenvs/django-indexer-Y-SQFfhb-py3.1 # Check if there are pending migrations if python manage.py showmigrations | grep '\[ \]'; then - echo 'Migrations found, stopping services...' >> "$LOG_FILE" + echo 'Migrations found; stopping services...' >> "$LOG_FILE" sudo systemctl stop gunicorn.service celery.service echo 'Applying migrations...' 
>> "$LOG_FILE" @@ -29,4 +29,8 @@ else sudo systemctl restart gunicorn.service celery.service fi +# Set correct permissions for all files in the project directory +chown -R ec2-user:ec2-user /home/ec2-user/django-indexer/ +chmod -R 775 /home/ec2-user/django-indexer/ + echo 'after_install.sh completed' >> "$LOG_FILE" From 96611373bef0c3f694bc447e4803ca427a700e8d Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Tue, 30 Apr 2024 10:38:43 -0400 Subject: [PATCH 012/127] add timetstamp to after_install logs --- scripts/after_install.sh | 3 +++ 1 file changed, 3 insertions(+) diff --git a/scripts/after_install.sh b/scripts/after_install.sh index 32cbe2a..2d47cb9 100755 --- a/scripts/after_install.sh +++ b/scripts/after_install.sh @@ -4,6 +4,9 @@ LOG_FILE="/home/ec2-user/django-indexer/logs/deploy.log" echo 'Running after_install.sh' >> "$LOG_FILE" +# Log the current date and time in a human-readable format +echo "Script execution started at: $(date '+%Y-%m-%d %H:%M:%S')" >> "$LOG_FILE" + # Define the project directory PROJECT_DIR="/home/ec2-user/django-indexer" From a81135438e5689c6bf76c7a1aba23479dedbeb48 Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Tue, 30 Apr 2024 10:41:02 -0400 Subject: [PATCH 013/127] add log to after_install --- scripts/after_install.sh | 2 ++ 1 file changed, 2 insertions(+) diff --git a/scripts/after_install.sh b/scripts/after_install.sh index 2d47cb9..3840b2a 100755 --- a/scripts/after_install.sh +++ b/scripts/after_install.sh @@ -36,4 +36,6 @@ fi chown -R ec2-user:ec2-user /home/ec2-user/django-indexer/ chmod -R 775 /home/ec2-user/django-indexer/ +echo "Set permissions for all files in the project directory" >> "$LOG_FILE" + echo 'after_install.sh completed' >> "$LOG_FILE" From 88a8cd399ceecfc0a5e9b07a762ac8e78d7b0589 Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Tue, 30 Apr 2024 10:52:58 -0400 Subject: [PATCH 014/127] fix permissions settings in after_install --- scripts/after_install.sh | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/scripts/after_install.sh b/scripts/after_install.sh index 3840b2a..7e883df 100755 --- a/scripts/after_install.sh +++ b/scripts/after_install.sh @@ -32,10 +32,16 @@ else sudo systemctl restart gunicorn.service celery.service fi -# Set correct permissions for all files in the project directory -chown -R ec2-user:ec2-user /home/ec2-user/django-indexer/ -chmod -R 775 /home/ec2-user/django-indexer/ +# Set correct ownership recursively for all files and directories in the project directory +sudo chown -R ec2-user:ec2-user /home/ec2-user/django-indexer/ +echo "$(date) - Changed ownership to ec2-user for all project files" >> "$LOG_FILE" -echo "Set permissions for all files in the project directory" >> "$LOG_FILE" +# Set read, write, and execute permissions for the owner and group, and read and execute for others +sudo chmod -R 775 /home/ec2-user/django-indexer/ +echo "$(date) - Set permissions to 775 for all project files" >> "$LOG_FILE" + +# Log the results of permissions change +echo "Permissions after update:" >> "$LOG_FILE" +ls -lah /home/ec2-user/django-indexer/ >> "$LOG_FILE" echo 'after_install.sh completed' >> "$LOG_FILE" From 4715bc661766e4e10f550792359ea994f8fbcc51 Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Tue, 30 Apr 2024 10:57:25 -0400 Subject: [PATCH 015/127] add elastic IP to allowed hosts --- 
base/settings.py | 17 ++++++----------- 1 file changed, 6 insertions(+), 11 deletions(-) diff --git a/base/settings.py b/base/settings.py index 37a0b34..fe661d7 100644 --- a/base/settings.py +++ b/base/settings.py @@ -30,7 +30,7 @@ # TODO: update before prod release SECRET_KEY = "django-insecure-=r_v_es6w6rxv42^#kc2hca6p%=fe_*cog_5!t%19zea!enlju" -ALLOWED_HOSTS = ["ec2-52-23-183-168.compute-1.amazonaws.com", "127.0.0.1"] +ALLOWED_HOSTS = ["ec2-100-27-57-47.compute-1.amazonaws.com", "127.0.0.1"] # Env vars AWS_ACCESS_KEY_ID = os.environ.get("PL_AWS_ACCESS_KEY_ID") @@ -190,7 +190,7 @@ # LOGGING # Setting the log level from an environment variable -LOG_LEVEL = os.getenv('LOG_LEVEL', 'INFO').upper() +LOG_LEVEL = os.getenv("LOG_LEVEL", "INFO").upper() log_level = getattr(logging, LOG_LEVEL, logging.INFO) # print("LOG_LEVEL: ", LOG_LEVEL) @@ -199,15 +199,13 @@ "version": 1, "disable_existing_loggers": False, "formatters": { - "standard": { - "format": "%(asctime)s [%(levelname)s] %(name)s: %(message)s" - }, + "standard": {"format": "%(asctime)s [%(levelname)s] %(name)s: %(message)s"}, }, "handlers": { "console": { "level": log_level, "class": "logging.StreamHandler", - "formatter": "standard" + "formatter": "standard", }, }, "loggers": { @@ -221,11 +219,8 @@ "level": log_level, "propagate": False, }, - "": { # root logger - "handlers": ["console"], - "level": log_level - } - } + "": {"handlers": ["console"], "level": log_level}, # root logger + }, } # Adding Watchtower logging handler for non-local environments From 948a52eea1abfff2b96bac938163982e9d629b89 Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Tue, 30 Apr 2024 10:57:55 -0400 Subject: [PATCH 016/127] remove permissions logging from after_install --- scripts/after_install.sh | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/scripts/after_install.sh b/scripts/after_install.sh index 7e883df..8139d7e 100755 --- a/scripts/after_install.sh +++ b/scripts/after_install.sh @@ -40,8 +40,8 @@ echo "$(date) - Changed ownership to ec2-user for all project files" >> "$LOG_FI sudo chmod -R 775 /home/ec2-user/django-indexer/ echo "$(date) - Set permissions to 775 for all project files" >> "$LOG_FILE" -# Log the results of permissions change -echo "Permissions after update:" >> "$LOG_FILE" -ls -lah /home/ec2-user/django-indexer/ >> "$LOG_FILE" +# # Log the results of permissions change +# echo "Permissions after update:" >> "$LOG_FILE" +# ls -lah /home/ec2-user/django-indexer/ >> "$LOG_FILE" echo 'after_install.sh completed' >> "$LOG_FILE" From 42a976f8f56436beee1f4f423080db069d10376d Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Tue, 30 Apr 2024 11:16:27 -0400 Subject: [PATCH 017/127] update nginx group permissions --- scripts/after_install.sh | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/scripts/after_install.sh b/scripts/after_install.sh index 8139d7e..23d7ae2 100755 --- a/scripts/after_install.sh +++ b/scripts/after_install.sh @@ -40,6 +40,12 @@ echo "$(date) - Changed ownership to ec2-user for all project files" >> "$LOG_FI sudo chmod -R 775 /home/ec2-user/django-indexer/ echo "$(date) - Set permissions to 775 for all project files" >> "$LOG_FILE" +# Set the group of the Django project and static files to nginx +sudo chown -R ec2-user:nginx /home/ec2-user/django-indexer + +# Set the group of the socket directory to nginx +sudo chown -R ec2-user:nginx /home/ec2-user/django-indexer/run + # # Log the results of 
permissions change # echo "Permissions after update:" >> "$LOG_FILE" # ls -lah /home/ec2-user/django-indexer/ >> "$LOG_FILE" From fea902f1473921ac56c742e43bc89ea6415c1da9 Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Tue, 30 Apr 2024 11:24:46 -0400 Subject: [PATCH 018/127] add poetry install to after_install.sh --- scripts/after_install.sh | 58 +++++++++++++++++++++++++++------------- 1 file changed, 39 insertions(+), 19 deletions(-) diff --git a/scripts/after_install.sh b/scripts/after_install.sh index 23d7ae2..a3006ad 100755 --- a/scripts/after_install.sh +++ b/scripts/after_install.sh @@ -4,8 +4,37 @@ LOG_FILE="/home/ec2-user/django-indexer/logs/deploy.log" echo 'Running after_install.sh' >> "$LOG_FILE" -# Log the current date and time in a human-readable format -echo "Script execution started at: $(date '+%Y-%m-%d %H:%M:%S')" >> "$LOG_FILE" +# # Log the current date and time in a human-readable format +# echo "Script execution started at: $(date '+%Y-%m-%d %H:%M:%S')" >> "$LOG_FILE" + +# # Set correct ownership recursively for all files and directories in the project directory +# sudo chown -R ec2-user:ec2-user /home/ec2-user/django-indexer/ +# echo "$(date) - Changed ownership to ec2-user for all project files" >> "$LOG_FILE" + +# # Set read, write, and execute permissions for the owner and group, and read and execute for others +# sudo chmod -R 775 /home/ec2-user/django-indexer/ +# echo "$(date) - Set permissions to 775 for all project files" >> "$LOG_FILE" + +# # Set the group of the Django project and static files to nginx +# sudo chown -R ec2-user:nginx /home/ec2-user/django-indexer + +# # Set the group of the socket directory to nginx +# sudo chown -R ec2-user:nginx /home/ec2-user/django-indexer/run + +# # Restart the nginx service to propagate the changes +# sudo systemctl restart nginx + +# Set correct ownership recursively for project directory +sudo chown -R ec2-user:nginx /home/ec2-user/django-indexer/ +echo "$(date '+%Y-%m-%d %H:%M:%S') - Corrected ownership to ec2-user:nginx" >> "$LOG_FILE" + +# Set the necessary permissions +sudo chmod -R 775 /home/ec2-user/django-indexer/ +echo "$(date '+%Y-%m-%d %H:%M:%S') - Set permissions to 775" >> "$LOG_FILE" + +# Restart nginx to apply any configuration changes +sudo systemctl restart nginx +echo "$(date '+%Y-%m-%d %H:%M:%S') - Restarted nginx" >> "$LOG_FILE" # Define the project directory PROJECT_DIR="/home/ec2-user/django-indexer" @@ -16,36 +45,27 @@ cd "$PROJECT_DIR" # Source the specific poetry virtual environment source "/home/ec2-user/.cache/pypoetry/virtualenvs/django-indexer-Y-SQFfhb-py3.11/bin/activate" +# Install dependencies using Poetry +echo "$(date '+%Y-%m-%d %H:%M:%S') - Installing dependencies with Poetry" >> "$LOG_FILE" +poetry install >> "$LOG_FILE" +echo "$(date '+%Y-%m-%d %H:%M:%S') - Dependencies installed" >> "$LOG_FILE" + # Check if there are pending migrations if python manage.py showmigrations | grep '\[ \]'; then echo 'Migrations found; stopping services...' >> "$LOG_FILE" - sudo systemctl stop gunicorn.service celery.service + sudo systemctl stop gunicorn celery echo 'Applying migrations...' >> "$LOG_FILE" python manage.py migrate >> "$LOG_FILE" echo 'Starting services...' >> "$LOG_FILE" - sudo systemctl start gunicorn.service celery.service + sudo systemctl start gunicorn celery else echo 'No migrations found. Running collectstatic and restarting services...' 
>> "$LOG_FILE" python manage.py collectstatic --noinput >> "$LOG_FILE" - sudo systemctl restart gunicorn.service celery.service + sudo systemctl restart gunicorn celery fi -# Set correct ownership recursively for all files and directories in the project directory -sudo chown -R ec2-user:ec2-user /home/ec2-user/django-indexer/ -echo "$(date) - Changed ownership to ec2-user for all project files" >> "$LOG_FILE" - -# Set read, write, and execute permissions for the owner and group, and read and execute for others -sudo chmod -R 775 /home/ec2-user/django-indexer/ -echo "$(date) - Set permissions to 775 for all project files" >> "$LOG_FILE" - -# Set the group of the Django project and static files to nginx -sudo chown -R ec2-user:nginx /home/ec2-user/django-indexer - -# Set the group of the socket directory to nginx -sudo chown -R ec2-user:nginx /home/ec2-user/django-indexer/run - # # Log the results of permissions change # echo "Permissions after update:" >> "$LOG_FILE" # ls -lah /home/ec2-user/django-indexer/ >> "$LOG_FILE" From 3d2fbc0728fafbe584dac065eae168cf2701eb24 Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Tue, 30 Apr 2024 11:28:46 -0400 Subject: [PATCH 019/127] remove old code from after_install --- scripts/after_install.sh | 31 +++++-------------------------- 1 file changed, 5 insertions(+), 26 deletions(-) diff --git a/scripts/after_install.sh b/scripts/after_install.sh index a3006ad..53d4e4f 100755 --- a/scripts/after_install.sh +++ b/scripts/after_install.sh @@ -2,27 +2,10 @@ # Log output to a specific file LOG_FILE="/home/ec2-user/django-indexer/logs/deploy.log" -echo 'Running after_install.sh' >> "$LOG_FILE" - -# # Log the current date and time in a human-readable format -# echo "Script execution started at: $(date '+%Y-%m-%d %H:%M:%S')" >> "$LOG_FILE" - -# # Set correct ownership recursively for all files and directories in the project directory -# sudo chown -R ec2-user:ec2-user /home/ec2-user/django-indexer/ -# echo "$(date) - Changed ownership to ec2-user for all project files" >> "$LOG_FILE" - -# # Set read, write, and execute permissions for the owner and group, and read and execute for others -# sudo chmod -R 775 /home/ec2-user/django-indexer/ -# echo "$(date) - Set permissions to 775 for all project files" >> "$LOG_FILE" - -# # Set the group of the Django project and static files to nginx -# sudo chown -R ec2-user:nginx /home/ec2-user/django-indexer - -# # Set the group of the socket directory to nginx -# sudo chown -R ec2-user:nginx /home/ec2-user/django-indexer/run - -# # Restart the nginx service to propagate the changes -# sudo systemctl restart nginx +echo -e "\n\n" >> "$LOG_FILE" +echo "=========================================" >> "$LOG_FILE" +echo "Running after_install.sh at $(date '+%Y-%m-%d %H:%M:%S')" >> "$LOG_FILE" +echo "=========================================" >> "$LOG_FILE" # Set correct ownership recursively for project directory sudo chown -R ec2-user:nginx /home/ec2-user/django-indexer/ @@ -66,8 +49,4 @@ else sudo systemctl restart gunicorn celery fi -# # Log the results of permissions change -# echo "Permissions after update:" >> "$LOG_FILE" -# ls -lah /home/ec2-user/django-indexer/ >> "$LOG_FILE" - -echo 'after_install.sh completed' >> "$LOG_FILE" +echo "$(date '+%Y-%m-%d %H:%M:%S') - after_install.sh completed" >> "$LOG_FILE" From 5f06c5d58f3ba52fa4a3a92f4d0721bd370bb64b Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Tue, 30 Apr 2024 15:11:14 
-0400 Subject: [PATCH 020/127] update donation admin to fix n+1 query issue --- donations/admin.py | 53 ++++++++++++++++++++++++++++++++++------------ 1 file changed, 39 insertions(+), 14 deletions(-) diff --git a/donations/admin.py b/donations/admin.py index da83b44..ad4e920 100644 --- a/donations/admin.py +++ b/donations/admin.py @@ -1,20 +1,45 @@ from django.contrib import admin from django.utils.dateformat import format from django.utils.timezone import localtime + from .models import Donation + +@admin.register(Donation) class DonationAdmin(admin.ModelAdmin): - list_display = [field.name for field in Donation._meta.get_fields() if field.name != 'id'] - list_display.extend(['donor_address', 'recipient_address', 'ft_address', 'referrer_address', 'chef_address']) # Add custom methods for addresses - search_fields = ('message', 'donor__address') # You can add more fields here - list_filter = ('donated_at', 'donor', 'pot') # Added default filters, you can add custom DateRangeFilter - date_hierarchy = 'donated_at' - ordering = ('-donated_at',) + list_display = [ + field.name for field in Donation._meta.get_fields() if field.name != "id" + ] + list_display.extend( + [ + "donor_address", + "recipient_address", + "ft_address", + "referrer_address", + "chef_address", + ] + ) # Add custom methods for addresses + search_fields = ( + "message", + "donor__id", + ) # Correct field name from 'donor__address' to 'donor__id' if 'id' is used in the model + list_filter = ("donated_at", "donor", "pot") + date_hierarchy = "donated_at" + ordering = ("-donated_at",) + + def get_queryset(self, request): + # Prefetch related donor, recipient, ft, referrer, and chef to prevent N+1 queries + return ( + super() + .get_queryset(request) + .prefetch_related("donor", "recipient", "ft", "referrer", "chef") + ) def donor_address(self, obj): return obj.donor.id - donor_address.admin_order_field = 'donor__address' # Allows column order sorting - donor_address.short_description = 'Donor Address' + + donor_address.admin_order_field = "donor__id" + donor_address.short_description = "Donor Address" def recipient_address(self, obj): return obj.recipient.id if obj.recipient else None @@ -29,10 +54,10 @@ def chef_address(self, obj): return obj.chef.id if obj.chef else None def formfield_for_dbfield(self, db_field, request, **kwargs): - field = super(DonationAdmin, self).formfield_for_dbfield(db_field, request, **kwargs) - if db_field.name in ['donated_at']: # Add more fields if needed - field.widget.format = '%d-%m-%Y %H:%M' # Change to your preferred format - field.widget.attrs.update({'class': 'vDateField', 'size': '20'}) + field = super(DonationAdmin, self).formfield_for_dbfield( + db_field, request, **kwargs + ) + if db_field.name == "donated_at": + field.widget.format = "%d-%m-%Y %H:%M" + field.widget.attrs.update({"class": "vDateField", "size": "20"}) return field - -admin.site.register(Donation, DonationAdmin) From e2ad39c5f72e4ca439e666efba21a81281f26896 Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Tue, 30 Apr 2024 17:08:05 -0400 Subject: [PATCH 021/127] use tz aware datetime --- indexer_app/utils.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/indexer_app/utils.py b/indexer_app/utils.py index 9c3562b..7e50e4d 100644 --- a/indexer_app/utils.py +++ b/indexer_app/utils.py @@ -6,11 +6,11 @@ import requests from django.conf import settings from django.core.cache import cache +from django.utils import timezone from 
near_lake_framework.near_primitives import ExecutionOutcome, Receipt from accounts.models import Account from activities.models import Activity -from base.logging import logger from base.utils import format_date, format_to_near from donations.models import Donation from indexer_app.models import BlockHeight @@ -25,6 +25,8 @@ ) from tokens.models import Token, TokenHistoricalPrice +from .logging import logger + GECKO_URL = "https://api.coingecko.com/api/v3" @@ -886,7 +888,7 @@ async def cache_block_height(key: str, height: int, block_count: int) -> int: if (block_count % int(settings.BLOCK_SAVE_HEIGHT or 400)) == 0: logger.info(f"saving daylight, {height}") await BlockHeight.objects.aupdate_or_create( - id=1, defaults={"block_height": height, "updated_at": datetime.now()} + id=1, defaults={"block_height": height, "updated_at": timezone.now()} ) # better than ovverriding model's save method to get a singleton? we need only one entry return height From b9dddc5d798ab07cbce279411a2b25c516a2ccec Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Tue, 30 Apr 2024 17:10:29 -0400 Subject: [PATCH 022/127] specify start block for debugging --- indexer_app/tasks.py | 19 +++++++++++++------ 1 file changed, 13 insertions(+), 6 deletions(-) diff --git a/indexer_app/tasks.py b/indexer_app/tasks.py index ecf33cc..010edc4 100644 --- a/indexer_app/tasks.py +++ b/indexer_app/tasks.py @@ -25,13 +25,17 @@ async def indexer(network: str, from_block: int, to_block: int): lake_config.aws_secret_key = settings.AWS_SECRET_ACCESS_KEY _, streamer_messages_queue = streamer(lake_config) block_count = 0 - + while True: try: # streamer_message is the current block streamer_message = await streamer_messages_queue.get() block_count += 1 - await cache_block_height("current_block_height", streamer_message.block.header.height, block_count) # current block height + await cache_block_height( + "current_block_height", + streamer_message.block.header.height, + block_count, + ) # current block height await handle_streamer_message(streamer_message) except Exception as e: logger.error(f"Error in streamer_messages_queue: {e}") @@ -45,16 +49,19 @@ def listen_to_near_events(): try: # Update below with desired network & block height - start_block = get_block_height('current_block_height') - # start_block = 104_963_982 + # start_block = get_block_height('current_block_height') + start_block = 105_923_501 # manually setting for debugging TODO: remove this logger.info(f"what's the start block, pray tell? 
{start_block-1}") - loop.run_until_complete(indexer("mainnet", start_block-1, None)) + loop.run_until_complete(indexer("mainnet", start_block - 1, None)) finally: loop.close() from celery.signals import task_revoked + @task_revoked.connect def on_task_revoked(request, terminated, signum, expired, **kwargs): - logger.info(f"Task {request.id} revoked; terminated={terminated}, signum={signum}, expired={expired}") + logger.info( + f"Task {request.id} revoked; terminated={terminated}, signum={signum}, expired={expired}" + ) From 1221100c42923c24abbc6854fb323bfa48f93bd2 Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Wed, 1 May 2024 08:23:41 -0400 Subject: [PATCH 023/127] trigger redeployment --- indexer_app/tasks.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/indexer_app/tasks.py b/indexer_app/tasks.py index 010edc4..89293c0 100644 --- a/indexer_app/tasks.py +++ b/indexer_app/tasks.py @@ -43,7 +43,7 @@ async def indexer(network: str, from_block: int, to_block: int): @shared_task def listen_to_near_events(): - logger.info("Listening to near events...") + logger.info("Listening to NEAR events...") loop = asyncio.new_event_loop() asyncio.set_event_loop(loop) From 8aeb330d4b3140a76a4164eaaa8a8b105d56316c Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Wed, 1 May 2024 09:36:38 -0400 Subject: [PATCH 024/127] fix settings.py for local celery --- base/settings.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/base/settings.py b/base/settings.py index fe661d7..41f5753 100644 --- a/base/settings.py +++ b/base/settings.py @@ -124,9 +124,13 @@ # Append SSL parameters as query parameters in the URL SSL_QUERY = "?ssl_cert_reqs=CERT_NONE" # TODO: UPDATE ACCORDING TO ENV (prod should require cert) -CELERY_BROKER_URL = f"{REDIS_BASE_URL}/0{SSL_QUERY}" +CELERY_BROKER_URL = f"{REDIS_BASE_URL}/0" +CELERY_RESULT_BACKEND = f"{REDIS_BASE_URL}/1" -CELERY_RESULT_BACKEND = f"{REDIS_BASE_URL}/1{SSL_QUERY}" + +if ENVIRONMENT != "local": + CELERY_BROKER_URL += SSL_QUERY + CELERY_RESULT_BACKEND += SSL_QUERY CELERY_BROKER_TRANSPORT_OPTIONS = { From 1c69d33e84c290e939c872fa2e3274f04c3d0cfd Mon Sep 17 00:00:00 2001 From: Prometheus Date: Thu, 2 May 2024 20:32:30 +0100 Subject: [PATCH 025/127] refactor batch function, edit model --- donations/models.py | 11 ++- indexer_app/handler.py | 38 ++++++---- indexer_app/models.py | 5 ++ indexer_app/tasks.py | 3 +- indexer_app/utils.py | 165 +---------------------------------------- 5 files changed, 43 insertions(+), 179 deletions(-) diff --git a/donations/models.py b/donations/models.py index 8c932e1..64e1500 100644 --- a/donations/models.py +++ b/donations/models.py @@ -156,6 +156,13 @@ class Donation(models.Model): class Meta: constraints = [ models.UniqueConstraint( - fields=["on_chain_id", "pot"], name="unique_pot_on_chain_id" - ) + fields=["on_chain_id"], + condition=models.Q(pot__isnull=True), + name="unique_on_chain_id_when_pot_is_null", + ), + models.UniqueConstraint( + fields=["on_chain_id", "pot"], + condition=models.Q(pot__isnull=False), + name="unique_on_chain_id_with_pot", + ), ] diff --git a/indexer_app/handler.py b/indexer_app/handler.py index 6fbf24a..45d5b1a 100644 --- a/indexer_app/handler.py +++ b/indexer_app/handler.py @@ -5,8 +5,12 @@ from django.core.cache import cache from near_lake_framework import near_primitives +from base.utils import convert_ns_to_utc +from pots.utils import match_pot_factory_version_pattern + 
from .logging import logger from .utils import ( + handle_batch_donations, handle_default_list_status_change, handle_list_admin_removal, handle_list_registration_update, @@ -22,8 +26,6 @@ handle_set_payouts, handle_transfer_payout, ) -from base.utils import convert_ns_to_utc -from pots.utils import match_pot_factory_version_pattern async def handle_streamer_message(streamer_message: near_primitives.StreamerMessage): @@ -62,7 +64,7 @@ async def handle_streamer_message(streamer_message: near_primitives.StreamerMess try: parsed_log = json.loads(log[len("EVENT_JSON:") :]) except json.JSONDecodeError: - logging.warning( + logger.warning( f"Receipt ID: `{receipt_execution_outcome.receipt.receipt_id}`\nError during parsing logs from JSON string to dict" ) continue @@ -158,16 +160,24 @@ async def handle_streamer_message(streamer_message: near_primitives.StreamerMess case "donate": # TODO: donate that produces result logger.info(f"switching bazooka to knifee works!! donate his blood: {args_dict}, {receipt}, {action}, {log_data}") - await handle_new_donations( - args_dict, - receiver_id, - signer_id, - "direct", - receipt, - status_obj, - log_data, - created_at, - ) + if len(log_data) > 1: + await handle_batch_donations( + receiver_id, + signer_id, + "direct", + receipt, + log_data + ) + else: + await handle_new_donations( + args_dict, + receiver_id, + signer_id, + "direct", + receipt, + status_obj, + log_data, + ) break case "handle_protocol_fee_callback": @@ -180,7 +190,6 @@ async def handle_streamer_message(streamer_message: near_primitives.StreamerMess receipt, status_obj, log_data, - created_at, ) break @@ -194,7 +203,6 @@ async def handle_streamer_message(streamer_message: near_primitives.StreamerMess receipt, status_obj, log_data, - created_at, ) break diff --git a/indexer_app/models.py b/indexer_app/models.py index f8c0a4b..ace12ec 100644 --- a/indexer_app/models.py +++ b/indexer_app/models.py @@ -12,6 +12,11 @@ class BlockHeight(models.Model): _("blockheight value"), help_text=_("the last blockheight saved to db."), ) + block_timestamp = models.DateTimeField( + _("block timestamp"), + help_text=_("date equivalent of the block height."), + null=True, + ) updated_at = models.DateTimeField( _("updated at"), help_text=_("block height last update at."), diff --git a/indexer_app/tasks.py b/indexer_app/tasks.py index 89293c0..1df4312 100644 --- a/indexer_app/tasks.py +++ b/indexer_app/tasks.py @@ -35,6 +35,7 @@ async def indexer(network: str, from_block: int, to_block: int): "current_block_height", streamer_message.block.header.height, block_count, + streamer_message.block.header.timestamp ) # current block height await handle_streamer_message(streamer_message) except Exception as e: @@ -52,7 +53,7 @@ def listen_to_near_events(): # start_block = get_block_height('current_block_height') start_block = 105_923_501 # manually setting for debugging TODO: remove this logger.info(f"what's the start block, pray tell? 
{start_block-1}") - loop.run_until_complete(indexer("mainnet", start_block - 1, None)) + loop.run_until_complete(indexer("mainnet", 111_949_088, None)) finally: loop.close() diff --git a/indexer_app/utils.py b/indexer_app/utils.py index 7e50e4d..cfd995f 100644 --- a/indexer_app/utils.py +++ b/indexer_app/utils.py @@ -505,154 +505,7 @@ async def handle_batch_donations( ): logger.info("BAtch Transaction for donation...") for event_data in log_data: - donation_data = event_data["donation"] - net_amount = int(donation_data["total_amount"]) - int( - donation_data["protocol_fee"] - ) - logger.info(f"Donation data: {donation_data}, {net_amount}") - # insert donate contract which is the receiver id(because of activitry relationship mainly) - donate_contract, _ = await Account.objects.aget_or_create(id=receiverId) - donated_at = datetime.fromtimestamp( - (donation_data.get("donated_at") or donation_data.get("donated_at_ms")) - / 1000 - ) - - # Upsert donor account - donor, _ = await Account.objects.aget_or_create(id=donation_data["donor_id"]) - - recipient = None - if donation_data.get("recipient_id"): - recipient, _ = await Account.objects.aget_or_create( - id=donation_data["recipient_id"] - ) - else: - if not donation_data.get("matching_pool"): - recipient, _ = await Account.objects.aget_or_create( - id=donation_data["project_id"] - ) - - if donation_data.get("referrer_id"): - referrer, _ = await Account.objects.aget_or_create( - id=donation_data["referrer_id"] - ) - - # Upsert token account - token_acct, _ = await Account.objects.aget_or_create( - id=(donation_data.get("ft_id") or "near") - ) - - # Upsert token - try: - token = await Token.objects.aget(id=token_acct) - except Token.DoesNotExist: - # TODO: fetch metadata from token contract (ft_metadata) and add decimals to token record. For now adding 12 which is most common - token = await Token.objects.acreate(id=token_acct, decimals=12) - - # Fetch historical token data - # late_p = await token.get_most_recent_price() - try: - logger.info("fetching historical price...") - endpoint = f"{GECKO_URL}/coins/{donation_data.get('ft_id', 'near')}/history?date={format_date(donated_at)}&localization=false" - response = requests.get(endpoint) - price_data = response.json() - unit_price = ( - price_data.get("market_data", {}).get("current_price", {}).get("usd") - ) - logger.info(f"the usd price is what, {unit_price}") - await TokenHistoricalPrice.objects.acreate( - token=token, - price_usd=unit_price, - ) - except Exception as e: - logger.warning(f"api rate limit? {e}") - # TODO: NB: below method has not been tested - # historical_price = await token.get_most_recent_price() # to use model methods, we might have to use asgiref sync_to_async - historical = await TokenHistoricalPrice.objects.aget( - token=token, - price_usd=unit_price, - ) - # print("fetched old price:", historical_price.price_usd) - unit_price = historical.price_usd - - total_amount = donation_data["total_amount"] - net_amount = net_amount - int(donation_data.get("referrer_fee") or 0) - - # Calculate USD amounts - totalnearAmount = format_to_near(total_amount) - netnearAmount = format_to_near(net_amount) - total_amount_usd = unit_price * totalnearAmount - net_amount_usd = unit_price * netnearAmount - - logger.info(f"inserting donations... 
{total_amount_usd}") - donation = await Donation.objects.acreate( - on_chain_id=donation_data["id"], - donor=donor, - total_amount=total_amount, - total_amount_usd=total_amount_usd, - net_amount_usd=net_amount_usd, - net_amount=net_amount, - ft=token_acct, - message=donation_data.get("message"), - donated_at=donated_at, - matching_pool=donation_data.get("matching_pool", False), - recipient=recipient, - protocol_fee=donation_data["protocol_fee"], - referrer=referrer if donation_data.get("referrer_id") else None, - referrer_fee=donation_data.get("referrer_fee"), - tx_hash=receipt_obj.receipt_id, - ) - - if actionName != "direct": - logger.info("selecting pot to make public donation update") - pot = await Pot.objects.aget(id=receiverId) - await Donation.objects.filter(id=donation.id).aupdate(**{"pot": pot}) - potUpdate = { - "total_public_donations": int(pot.total_public_donations or 0) - + int(total_amount), - } - if donation_data.get("matching_pool"): - potUpdate["total_matching_pool"] = ( - pot.total_matching_pool or 0 - ) + total_amount - potUpdate["matching_pool_donations_count"] = ( - pot.matching_pool_donations_count or 0 - ) + 1 - # accountUpdate = {} - else: - potUpdate["public_donations_count"] = ( - pot.public_donations_count or 0 - ) + 1 - await Pot.objects.filter(id=receiverId).aupdate(**potUpdate) - - # donation_recipient = donation_data.get('project_id', donation_data['recipient_id']) - logger.info( - f"update totl donated for {donor.id}, {donor.total_donations_out_usd + decimal.Decimal(total_amount_usd)}" - ) - await Account.objects.filter(id=donor.id).aupdate( - **{ - "total_donations_out_usd": donor.total_donations_out_usd - + decimal.Decimal(total_amount_usd) - } - ) - if recipient: - acct = await Account.objects.aget(id=recipient.id) - logger.info(f"selected {acct} to perform donor count update") - acctUpdate = { - "donors_count": acct.donors_count + 1, - "total_donations_in_usd": acct.total_donations_in_usd - + decimal.Decimal(net_amount_usd), - } - await Account.objects.filter(id=recipient.id).aupdate(**acctUpdate) - - # Insert activity record - await Activity.objects.acreate( - signer_id=signerId, - receiver_id=receiverId, - timestamp=donation.donated_at, - type="Donate_Direct", - action_result=donation_data, - tx_hash=receipt_obj.receipt_id, - ) + await handle_new_donations(event_data["donation"], receiverId, signerId, actionName, receipt_obj, status_obj=None, log_data=[event_data]) async def handle_new_donations( @@ -663,7 +516,6 @@ async def handle_new_donations( receipt_obj: Receipt, status_obj: ExecutionOutcome, log_data: list, - created_at: datetime, ): logger.info(f"new donation data: {data}, {receiverId}") @@ -676,16 +528,6 @@ async def handle_new_donations( if not log_data: return - if len(log_data) > 1: - # log_data = [ - # x - # for x in log_data - # if x["donation"]["recipient_id"] == data["recipient_id"] - # ] - return await handle_batch_donations( - receiverId, signerId, actionName, receipt_obj, log_data - ) - logger.info(f"event after possible filtering: {log_data}") event_data = log_data[0] @@ -882,13 +724,14 @@ async def handle_new_donations( ) -async def cache_block_height(key: str, height: int, block_count: int) -> int: +async def cache_block_height(key: str, height: int, block_count: int, block_timestamp: int) -> int: await cache.aset(key, height) # the cache os the default go to for the restart block, the db is a backup if the redis server crashes. 
if (block_count % int(settings.BLOCK_SAVE_HEIGHT or 400)) == 0: logger.info(f"saving daylight, {height}") await BlockHeight.objects.aupdate_or_create( - id=1, defaults={"block_height": height, "updated_at": timezone.now()} + id=1, + defaults={"block_height": height, "block_timestamp": datetime.fromtimestamp(block_timestamp / 1000000000), "updated_at": timezone.now()} ) # better than ovverriding model's save method to get a singleton? we need only one entry return height From a04c04f816afcc1d6fca5c72e502d500726bb785 Mon Sep 17 00:00:00 2001 From: Prometheus Date: Thu, 2 May 2024 20:38:14 +0100 Subject: [PATCH 026/127] correct start block --- indexer_app/tasks.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/indexer_app/tasks.py b/indexer_app/tasks.py index 1df4312..4a33bbb 100644 --- a/indexer_app/tasks.py +++ b/indexer_app/tasks.py @@ -53,7 +53,7 @@ def listen_to_near_events(): # start_block = get_block_height('current_block_height') start_block = 105_923_501 # manually setting for debugging TODO: remove this logger.info(f"what's the start block, pray tell? {start_block-1}") - loop.run_until_complete(indexer("mainnet", 111_949_088, None)) + loop.run_until_complete(indexer("mainnet", start_block - 1, None)) finally: loop.close() From 60801acb699408b12e9fe4e9e2f05f6e6801481c Mon Sep 17 00:00:00 2001 From: Prometheus Date: Fri, 3 May 2024 13:28:30 +0100 Subject: [PATCH 027/127] feat: make donation entries update_or_create --- .../migrations/0002_alter_activity_options.py | 17 ++ ...onation_unique_pot_on_chain_id_and_more.py | 35 ++++ .../0002_blockheight_block_timestamp.py | 22 ++ indexer_app/utils.py | 198 +++++++++--------- 4 files changed, 174 insertions(+), 98 deletions(-) create mode 100644 activities/migrations/0002_alter_activity_options.py create mode 100644 donations/migrations/0005_remove_donation_unique_pot_on_chain_id_and_more.py create mode 100644 indexer_app/migrations/0002_blockheight_block_timestamp.py diff --git a/activities/migrations/0002_alter_activity_options.py b/activities/migrations/0002_alter_activity_options.py new file mode 100644 index 0000000..12fd19c --- /dev/null +++ b/activities/migrations/0002_alter_activity_options.py @@ -0,0 +1,17 @@ +# Generated by Django 5.0.4 on 2024-05-03 10:29 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ("activities", "0001_initial"), + ] + + operations = [ + migrations.AlterModelOptions( + name="activity", + options={"verbose_name_plural": "Activities"}, + ), + ] diff --git a/donations/migrations/0005_remove_donation_unique_pot_on_chain_id_and_more.py b/donations/migrations/0005_remove_donation_unique_pot_on_chain_id_and_more.py new file mode 100644 index 0000000..67e0e0b --- /dev/null +++ b/donations/migrations/0005_remove_donation_unique_pot_on_chain_id_and_more.py @@ -0,0 +1,35 @@ +# Generated by Django 5.0.4 on 2024-05-03 10:29 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("accounts", "0001_initial"), + ("donations", "0004_alter_donation_on_chain_id"), + ("pots", "0001_initial"), + ] + + operations = [ + migrations.RemoveConstraint( + model_name="donation", + name="unique_pot_on_chain_id", + ), + migrations.AddConstraint( + model_name="donation", + constraint=models.UniqueConstraint( + condition=models.Q(("pot__isnull", True)), + fields=("on_chain_id",), + name="unique_on_chain_id_when_pot_is_null", + ), + ), + migrations.AddConstraint( + model_name="donation", + 
constraint=models.UniqueConstraint( + condition=models.Q(("pot__isnull", False)), + fields=("on_chain_id", "pot"), + name="unique_on_chain_id_with_pot", + ), + ), + ] diff --git a/indexer_app/migrations/0002_blockheight_block_timestamp.py b/indexer_app/migrations/0002_blockheight_block_timestamp.py new file mode 100644 index 0000000..deba8d4 --- /dev/null +++ b/indexer_app/migrations/0002_blockheight_block_timestamp.py @@ -0,0 +1,22 @@ +# Generated by Django 5.0.4 on 2024-05-03 10:29 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("indexer_app", "0001_initial"), + ] + + operations = [ + migrations.AddField( + model_name="blockheight", + name="block_timestamp", + field=models.DateTimeField( + help_text="date equivalent of the block height.", + null=True, + verbose_name="block timestamp", + ), + ), + ] diff --git a/indexer_app/utils.py b/indexer_app/utils.py index cfd995f..bd142b4 100644 --- a/indexer_app/utils.py +++ b/indexer_app/utils.py @@ -612,116 +612,118 @@ async def handle_new_donations( total_amount_usd = unit_price * totalnearAmount net_amount_usd = unit_price * netnearAmount - logger.info(f"inserting donations... {total_amount_usd}") - if actionName == "direct": # - donation = await Donation.objects.acreate( + logger.info(f"inserting donations... by {actionName}, {total_amount_usd}") + default_data = { + "donor": donor, + "total_amount": total_amount, + "total_amount_usd": total_amount_usd, + "net_amount_usd": net_amount_usd, + "net_amount": net_amount, + "ft": token_acct, + "message": donation_data.get("message"), + "donated_at": donated_at, + "matching_pool": donation_data.get("matching_pool", False), + "recipient": recipient, + "protocol_fee": donation_data["protocol_fee"], + "referrer": referrer if donation_data.get("referrer_id") else None, + "referrer_fee": donation_data.get("referrer_fee"), + "tx_hash": receipt_obj.receipt_id, + } + created = False + if actionName == "direct": + donation, created = await Donation.objects.aupdate_or_create( on_chain_id=donation_data["id"], - donor=donor, - total_amount=total_amount, - total_amount_usd=total_amount_usd, - net_amount_usd=net_amount_usd, - net_amount=net_amount, - ft=token_acct, - message=donation_data.get("message"), - donated_at=donated_at, - matching_pool=donation_data.get("matching_pool", False), - recipient=recipient, - protocol_fee=donation_data["protocol_fee"], - referrer=referrer if donation_data.get("referrer_id") else None, - referrer_fee=donation_data.get("referrer_fee"), - tx_hash=receipt_obj.receipt_id, + defaults={}, + create_defaults=default_data ) + # forgot why i didn't use else, but didn't for a reason. 
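The defaults={} / create_defaults=default_data split above is what makes re-running the indexer over old blocks idempotent: an existing Donation is matched by on_chain_id and left untouched, and the full payload is only written when the row is new. Note that create_defaults requires Django 5.0+ (the migrations in this series are generated by 5.0.4). Roughly equivalent long-hand, ignoring the race handling that update_or_create adds:

    try:
        donation = await Donation.objects.aget(on_chain_id=donation_data["id"])
        created = False  # already indexed; keep the stored row as-is
    except Donation.DoesNotExist:
        donation = await Donation.objects.acreate(
            on_chain_id=donation_data["id"], **default_data
        )
        created = True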
if actionName != "direct": logger.info("selecting pot to make public donation update") pot = await Pot.objects.aget(id=receiverId) - donation = await Donation.objects.acreate( + default_data["pot"] = pot + donation, created = await Donation.objects.aupdate_or_create( on_chain_id=donation_data["id"], - donor=donor, - pot=pot, - total_amount=total_amount, - total_amount_usd=total_amount_usd, - net_amount_usd=net_amount_usd, - net_amount=net_amount, - ft=token_acct, - message=donation_data.get("message"), - donated_at=donated_at, - matching_pool=donation_data.get("matching_pool", False), - recipient=recipient, - protocol_fee=donation_data["protocol_fee"], - referrer=referrer if donation_data.get("referrer_id") else None, - referrer_fee=donation_data.get("referrer_fee"), - tx_hash=receipt_obj.receipt_id, + defaults={}, + create_defaults=default_data ) - potUpdate = { - "total_public_donations": str( - int(pot.total_public_donations or 0) + int(total_amount) - ), - "total_public_donations_usd": int(pot.total_public_donations_usd or 0.0) - + total_amount_usd, - } - if donation_data.get("matching_pool"): - potUpdate["total_matching_pool"] = str( - int(pot.total_matching_pool or 0) + int(total_amount) - ) - potUpdate["total_matching_pool"] = ( - pot.total_matching_pool_usd or 0.0 - ) + total_amount_usd - potUpdate["matching_pool_donations_count"] = ( - pot.matching_pool_donations_count or 0 - ) + 1 - - if recipient: - await Account.objects.filter(id=recipient.id).aupdate( - **{ - "total_matching_pool_allocations_usd": recipient.total_matching_pool_allocations_usd - + total_amount_usd - } - ) - # accountUpdate = {} - else: - potUpdate["public_donations_count"] = (pot.public_donations_count or 0) + 1 + + logger.info(f"Backfilling data? {created}") - await Pot.objects.filter(id=receiverId).aupdate(**potUpdate) + if created: # only do updates if donation object was created - # donation_recipient = donation_data.get('project_id', donation_data['recipient_id']) - logger.info( - f"update totl donated for {donor.id}, {donor.total_donations_out_usd + decimal.Decimal(total_amount_usd)}" - ) - await Account.objects.filter(id=donor.id).aupdate( - **{ - "total_donations_out_usd": donor.total_donations_out_usd - + decimal.Decimal(total_amount_usd) - } - ) - if recipient: - acct = await Account.objects.aget(id=recipient.id) - logger.info(f"selected {acct} to perform donor count update") - acctUpdate = { - "donors_count": acct.donors_count + 1, - "total_donations_in_usd": acct.total_donations_in_usd - + decimal.Decimal(net_amount_usd), - } - await Account.objects.filter(id=recipient.id).aupdate(**acctUpdate) + if actionName != "direct": - # Insert activity record - await Activity.objects.acreate( - signer_id=signerId, - receiver_id=receiverId, - timestamp=donation.donated_at, - type=( - "Donate_Direct" - if actionName == "direct" - else ( - "Donate_Pot_Matching_Pool" - if donation.matching_pool - else "Donate_Pot_Public" - ) - ), - action_result=donation_data, - tx_hash=receipt_obj.receipt_id, - ) + potUpdate = { + "total_public_donations": str( + int(pot.total_public_donations or 0) + int(total_amount) + ), + "total_public_donations_usd": int(pot.total_public_donations_usd or 0.0) + + total_amount_usd, + } + if donation_data.get("matching_pool"): + potUpdate["total_matching_pool"] = str( + int(pot.total_matching_pool or 0) + int(total_amount) + ) + potUpdate["total_matching_pool"] = ( + pot.total_matching_pool_usd or 0.0 + ) + total_amount_usd + potUpdate["matching_pool_donations_count"] = ( + 
pot.matching_pool_donations_count or 0 + ) + 1 + + if recipient: + await Account.objects.filter(id=recipient.id).aupdate( + **{ + "total_matching_pool_allocations_usd": recipient.total_matching_pool_allocations_usd + + total_amount_usd + } + ) + + # accountUpdate = {} + else: + potUpdate["public_donations_count"] = (pot.public_donations_count or 0) + 1 + + await Pot.objects.filter(id=receiverId).aupdate(**potUpdate) + + # donation_recipient = donation_data.get('project_id', donation_data['recipient_id']) + logger.info( + f"update totl donated for {donor.id}, {donor.total_donations_out_usd + decimal.Decimal(total_amount_usd)}" + ) + await Account.objects.filter(id=donor.id).aupdate( + **{ + "total_donations_out_usd": donor.total_donations_out_usd + + decimal.Decimal(total_amount_usd) + } + ) + if recipient: + acct = await Account.objects.aget(id=recipient.id) + logger.info(f"selected {acct} to perform donor count update") + acctUpdate = { + "donors_count": acct.donors_count + 1, + "total_donations_in_usd": acct.total_donations_in_usd + + decimal.Decimal(net_amount_usd), + } + await Account.objects.filter(id=recipient.id).aupdate(**acctUpdate) + + # Insert activity record + await Activity.objects.acreate( + signer_id=signerId, + receiver_id=receiverId, + timestamp=donation.donated_at, + type=( + "Donate_Direct" + if actionName == "direct" + else ( + "Donate_Pot_Matching_Pool" + if donation.matching_pool + else "Donate_Pot_Public" + ) + ), + action_result=donation_data, + tx_hash=receipt_obj.receipt_id, + ) async def cache_block_height(key: str, height: int, block_count: int, block_timestamp: int) -> int: From d03c11afc5a1812d0688ad6b944f7fd375eede4d Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Fri, 3 May 2024 12:46:21 -0400 Subject: [PATCH 028/127] update start block for debugging --- indexer_app/tasks.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/indexer_app/tasks.py b/indexer_app/tasks.py index 4a33bbb..9e23a2d 100644 --- a/indexer_app/tasks.py +++ b/indexer_app/tasks.py @@ -35,7 +35,7 @@ async def indexer(network: str, from_block: int, to_block: int): "current_block_height", streamer_message.block.header.height, block_count, - streamer_message.block.header.timestamp + streamer_message.block.header.timestamp, ) # current block height await handle_streamer_message(streamer_message) except Exception as e: @@ -51,7 +51,7 @@ def listen_to_near_events(): try: # Update below with desired network & block height # start_block = get_block_height('current_block_height') - start_block = 105_923_501 # manually setting for debugging TODO: remove this + start_block = 104_858_762 # manually setting for debugging TODO: remove this logger.info(f"what's the start block, pray tell? 
{start_block-1}") loop.run_until_complete(indexer("mainnet", start_block - 1, None)) finally: From 5f66bc44d535e99bb5c1091bd0a0536eb13a00d5 Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Fri, 3 May 2024 13:25:05 -0400 Subject: [PATCH 029/127] debug logs for after_install migration check --- scripts/after_install.sh | 34 ++++++++++++++++++++++++++-------- 1 file changed, 26 insertions(+), 8 deletions(-) diff --git a/scripts/after_install.sh b/scripts/after_install.sh index 53d4e4f..d5307c4 100755 --- a/scripts/after_install.sh +++ b/scripts/after_install.sh @@ -33,20 +33,38 @@ echo "$(date '+%Y-%m-%d %H:%M:%S') - Installing dependencies with Poetry" >> "$L poetry install >> "$LOG_FILE" echo "$(date '+%Y-%m-%d %H:%M:%S') - Dependencies installed" >> "$LOG_FILE" -# Check if there are pending migrations -if python manage.py showmigrations | grep '\[ \]'; then - echo 'Migrations found; stopping services...' >> "$LOG_FILE" - sudo systemctl stop gunicorn celery +# Check if there are pending migrations and log the output +echo "Checking for pending migrations..." >> "$LOG_FILE" +PENDING_MIGRATIONS=$(python manage.py showmigrations | grep '\[ \]' 2>&1) # Redirect stderr to stdout +echo "Migration check output: $PENDING_MIGRATIONS" >> "$LOG_FILE" - echo 'Applying migrations...' >> "$LOG_FILE" +if [[ ! -z "$PENDING_MIGRATIONS" ]]; then + echo "$(date '+%Y-%m-%d %H:%M:%S') - Migrations found; stopping services..." >> "$LOG_FILE" + sudo systemctl stop gunicorn celery + echo "$(date '+%Y-%m-%d %H:%M:%S') - Applying migrations..." >> "$LOG_FILE" python manage.py migrate >> "$LOG_FILE" - - echo 'Starting services...' >> "$LOG_FILE" + echo "$(date '+%Y-%m-%d %H:%M:%S') - Starting services..." >> "$LOG_FILE" sudo systemctl start gunicorn celery else - echo 'No migrations found. Running collectstatic and restarting services...' >> "$LOG_FILE" + echo "$(date '+%Y-%m-%d %H:%M:%S') - No migrations found. Running collectstatic and restarting services..." >> "$LOG_FILE" python manage.py collectstatic --noinput >> "$LOG_FILE" sudo systemctl restart gunicorn celery fi +# # Check if there are pending migrations +# if python manage.py showmigrations | grep '\[ \]'; then +# echo 'Migrations found; stopping services...' >> "$LOG_FILE" +# sudo systemctl stop gunicorn celery + +# echo 'Applying migrations...' >> "$LOG_FILE" +# python manage.py migrate >> "$LOG_FILE" + +# echo 'Starting services...' >> "$LOG_FILE" +# sudo systemctl start gunicorn celery +# else +# echo 'No migrations found. Running collectstatic and restarting services...' >> "$LOG_FILE" +# python manage.py collectstatic --noinput >> "$LOG_FILE" +# sudo systemctl restart gunicorn celery +# fi + echo "$(date '+%Y-%m-%d %H:%M:%S') - after_install.sh completed" >> "$LOG_FILE" From 7024a416be086193d95ca0a0a5e405c974a71bbd Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Fri, 3 May 2024 13:30:27 -0400 Subject: [PATCH 030/127] migration check debugging --- scripts/after_install.sh | 36 +++++++++++++++++++++++++++++------- 1 file changed, 29 insertions(+), 7 deletions(-) diff --git a/scripts/after_install.sh b/scripts/after_install.sh index d5307c4..f83b6c9 100755 --- a/scripts/after_install.sh +++ b/scripts/after_install.sh @@ -38,18 +38,40 @@ echo "Checking for pending migrations..." 
>> "$LOG_FILE" PENDING_MIGRATIONS=$(python manage.py showmigrations | grep '\[ \]' 2>&1) # Redirect stderr to stdout echo "Migration check output: $PENDING_MIGRATIONS" >> "$LOG_FILE" -if [[ ! -z "$PENDING_MIGRATIONS" ]]; then - echo "$(date '+%Y-%m-%d %H:%M:%S') - Migrations found; stopping services..." >> "$LOG_FILE" +# Log the full output of showmigrations +echo "Checking for pending migrations..." >> "$LOG_FILE" +python manage.py showmigrations >> "$LOG_FILE" 2>&1 # Logging full output to diagnose + +# Now, let's use a more direct check for unapplied migrations +PENDING_MIGRATIONS=$(python manage.py showmigrations | grep "\[ \]" | wc -l) # Count unapplied migrations + +if [ "$PENDING_MIGRATIONS" -gt 0 ]; then + echo "Migrations found; stopping services..." >> "$LOG_FILE" sudo systemctl stop gunicorn celery - echo "$(date '+%Y-%m-%d %H:%M:%S') - Applying migrations..." >> "$LOG_FILE" - python manage.py migrate >> "$LOG_FILE" - echo "$(date '+%Y-%m-%d %H:%M:%S') - Starting services..." >> "$LOG_FILE" + + echo 'Applying migrations...' >> "$LOG_FILE" + python manage.py migrate >> "$LOG_FILE" 2>&1 + + echo 'Starting services...' >> "$LOG_FILE" sudo systemctl start gunicorn celery else - echo "$(date '+%Y-%m-%d %H:%M:%S') - No migrations found. Running collectstatic and restarting services..." >> "$LOG_FILE" - python manage.py collectstatic --noinput >> "$LOG_FILE" + echo 'No migrations found. Running collectstatic and restarting services...' >> "$LOG_FILE" + python manage.py collectstatic --noinput >> "$LOG_FILE" 2>&1 sudo systemctl restart gunicorn celery fi +# # not working version +# if [[ ! -z "$PENDING_MIGRATIONS" ]]; then +# echo "$(date '+%Y-%m-%d %H:%M:%S') - Migrations found; stopping services..." >> "$LOG_FILE" +# sudo systemctl stop gunicorn celery +# echo "$(date '+%Y-%m-%d %H:%M:%S') - Applying migrations..." >> "$LOG_FILE" +# python manage.py migrate >> "$LOG_FILE" +# echo "$(date '+%Y-%m-%d %H:%M:%S') - Starting services..." >> "$LOG_FILE" +# sudo systemctl start gunicorn celery +# else +# echo "$(date '+%Y-%m-%d %H:%M:%S') - No migrations found. Running collectstatic and restarting services..." 
>> "$LOG_FILE" +# python manage.py collectstatic --noinput >> "$LOG_FILE" +# sudo systemctl restart gunicorn celery +# fi # # Check if there are pending migrations # if python manage.py showmigrations | grep '\[ \]'; then From 002d94b14d14a8430a280b9ad0d5afb375e3a174 Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Fri, 3 May 2024 14:18:20 -0400 Subject: [PATCH 031/127] load env vars in after_install --- scripts/after_install.sh | 3 +++ 1 file changed, 3 insertions(+) diff --git a/scripts/after_install.sh b/scripts/after_install.sh index f83b6c9..defd978 100755 --- a/scripts/after_install.sh +++ b/scripts/after_install.sh @@ -7,6 +7,9 @@ echo "=========================================" >> "$LOG_FILE" echo "Running after_install.sh at $(date '+%Y-%m-%d %H:%M:%S')" >> "$LOG_FILE" echo "=========================================" >> "$LOG_FILE" +# Load env vars +source /home/ec2-user/.bashrc + # Set correct ownership recursively for project directory sudo chown -R ec2-user:nginx /home/ec2-user/django-indexer/ echo "$(date '+%Y-%m-%d %H:%M:%S') - Corrected ownership to ec2-user:nginx" >> "$LOG_FILE" From 6d4e5762f86b22b4152d1b288551e9ff9b7867ce Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Fri, 3 May 2024 14:29:40 -0400 Subject: [PATCH 032/127] remove old code --- scripts/after_install.sh | 33 ++------------------------------- 1 file changed, 2 insertions(+), 31 deletions(-) diff --git a/scripts/after_install.sh b/scripts/after_install.sh index defd978..5ad3b7c 100755 --- a/scripts/after_install.sh +++ b/scripts/after_install.sh @@ -45,7 +45,7 @@ echo "Migration check output: $PENDING_MIGRATIONS" >> "$LOG_FILE" echo "Checking for pending migrations..." >> "$LOG_FILE" python manage.py showmigrations >> "$LOG_FILE" 2>&1 # Logging full output to diagnose -# Now, let's use a more direct check for unapplied migrations +# Check for unapplied migrations PENDING_MIGRATIONS=$(python manage.py showmigrations | grep "\[ \]" | wc -l) # Count unapplied migrations if [ "$PENDING_MIGRATIONS" -gt 0 ]; then @@ -61,35 +61,6 @@ else echo 'No migrations found. Running collectstatic and restarting services...' >> "$LOG_FILE" python manage.py collectstatic --noinput >> "$LOG_FILE" 2>&1 sudo systemctl restart gunicorn celery -fi -# # not working version -# if [[ ! -z "$PENDING_MIGRATIONS" ]]; then -# echo "$(date '+%Y-%m-%d %H:%M:%S') - Migrations found; stopping services..." >> "$LOG_FILE" -# sudo systemctl stop gunicorn celery -# echo "$(date '+%Y-%m-%d %H:%M:%S') - Applying migrations..." >> "$LOG_FILE" -# python manage.py migrate >> "$LOG_FILE" -# echo "$(date '+%Y-%m-%d %H:%M:%S') - Starting services..." >> "$LOG_FILE" -# sudo systemctl start gunicorn celery -# else -# echo "$(date '+%Y-%m-%d %H:%M:%S') - No migrations found. Running collectstatic and restarting services..." >> "$LOG_FILE" -# python manage.py collectstatic --noinput >> "$LOG_FILE" -# sudo systemctl restart gunicorn celery -# fi - -# # Check if there are pending migrations -# if python manage.py showmigrations | grep '\[ \]'; then -# echo 'Migrations found; stopping services...' >> "$LOG_FILE" -# sudo systemctl stop gunicorn celery - -# echo 'Applying migrations...' >> "$LOG_FILE" -# python manage.py migrate >> "$LOG_FILE" - -# echo 'Starting services...' >> "$LOG_FILE" -# sudo systemctl start gunicorn celery -# else -# echo 'No migrations found. Running collectstatic and restarting services...' 
>> "$LOG_FILE" -# python manage.py collectstatic --noinput >> "$LOG_FILE" -# sudo systemctl restart gunicorn celery -# fi +f echo "$(date '+%Y-%m-%d %H:%M:%S') - after_install.sh completed" >> "$LOG_FILE" From b05e457b68d447aa7724fb4607ac41ebca8117f4 Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Fri, 3 May 2024 14:31:10 -0400 Subject: [PATCH 033/127] display donation id on admin --- donations/admin.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/donations/admin.py b/donations/admin.py index ad4e920..b79f09b 100644 --- a/donations/admin.py +++ b/donations/admin.py @@ -7,9 +7,10 @@ @admin.register(Donation) class DonationAdmin(admin.ModelAdmin): - list_display = [ - field.name for field in Donation._meta.get_fields() if field.name != "id" - ] + # list_display = [ + # field.name for field in Donation._meta.get_fields() if field.name != "id" + # ] + list_display = ["__all__"] list_display.extend( [ "donor_address", From 544e2c0170d1a2d149e3093f09f29a4114480ca8 Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Fri, 3 May 2024 14:36:56 -0400 Subject: [PATCH 034/127] fix donation id on admin dashboard --- donations/admin.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/donations/admin.py b/donations/admin.py index b79f09b..43f87d3 100644 --- a/donations/admin.py +++ b/donations/admin.py @@ -7,10 +7,7 @@ @admin.register(Donation) class DonationAdmin(admin.ModelAdmin): - # list_display = [ - # field.name for field in Donation._meta.get_fields() if field.name != "id" - # ] - list_display = ["__all__"] + list_display = [field.name for field in Donation._meta.get_fields()] list_display.extend( [ "donor_address", From 6337caa2031bd8c893772f21340c40fcd8b68a72 Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Fri, 3 May 2024 14:41:09 -0400 Subject: [PATCH 035/127] fix typo in after_install --- scripts/after_install.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/after_install.sh b/scripts/after_install.sh index 5ad3b7c..0bdcdf0 100755 --- a/scripts/after_install.sh +++ b/scripts/after_install.sh @@ -61,6 +61,6 @@ else echo 'No migrations found. Running collectstatic and restarting services...' 
>> "$LOG_FILE" python manage.py collectstatic --noinput >> "$LOG_FILE" 2>&1 sudo systemctl restart gunicorn celery -f +fi echo "$(date '+%Y-%m-%d %H:%M:%S') - after_install.sh completed" >> "$LOG_FILE" From 5647bb27ed622aad65ec69a5d9d6d901ae18ac28 Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Fri, 3 May 2024 15:04:57 -0400 Subject: [PATCH 036/127] fix TokenHistoricalPrice price_usd null issue --- indexer_app/utils.py | 65 +++++++++++++++++++++++++++----------------- 1 file changed, 40 insertions(+), 25 deletions(-) diff --git a/indexer_app/utils.py b/indexer_app/utils.py index bd142b4..3f12910 100644 --- a/indexer_app/utils.py +++ b/indexer_app/utils.py @@ -505,7 +505,15 @@ async def handle_batch_donations( ): logger.info("BAtch Transaction for donation...") for event_data in log_data: - await handle_new_donations(event_data["donation"], receiverId, signerId, actionName, receipt_obj, status_obj=None, log_data=[event_data]) + await handle_new_donations( + event_data["donation"], + receiverId, + signerId, + actionName, + receipt_obj, + status_obj=None, + log_data=[event_data], + ) async def handle_new_donations( @@ -588,20 +596,24 @@ async def handle_new_donations( endpoint = f"{GECKO_URL}/coins/{donation_data.get('ft_id', 'near')}/history?date={format_date(donated_at)}&localization=false" response = requests.get(endpoint) price_data = response.json() - unit_price = ( - price_data.get("market_data", {}).get("current_price", {}).get("usd") - ) - await TokenHistoricalPrice.objects.acreate( # need to change token model to use token as id - token=token, - price_usd=unit_price, - ) except Exception as e: - logger.warning(f"api rate limit? {e}") - # TODO: NB: below method has not been tested - # historical_price = await token.get_most_recent_price() # to use model methods, we might have to use asgiref sync_to_async - historical = await TokenHistoricalPrice.objects.aget(token=token) - # print("fetched old price:", historical_price.price_usd) - unit_price = historical.price_usd + logger.warning(f"Failed to fetch price data: {e}") + # logger.debug(f"price data: {price_data}") + unit_price = price_data.get("market_data", {}).get("current_price", {}).get("usd") + logger.debug(f"unit price: {unit_price}") + if unit_price: + try: + await TokenHistoricalPrice.objects.acreate( # need to change token model to use token as id + token=token, + price_usd=unit_price, + ) + except Exception as e: + logger.warning( + f"Error creating TokenHistoricalPrice: {e} token: {token} unit_price: {unit_price}" + ) + # historical_price = await token.get_most_recent_price() # to use model methods, we might have to use asgiref sync_to_async + historical = await TokenHistoricalPrice.objects.aget(token=token) + unit_price = historical.price_usd total_amount = donation_data["total_amount"] net_amount = net_amount - int(donation_data.get("referrer_fee") or 0) @@ -632,9 +644,7 @@ async def handle_new_donations( created = False if actionName == "direct": donation, created = await Donation.objects.aupdate_or_create( - on_chain_id=donation_data["id"], - defaults={}, - create_defaults=default_data + on_chain_id=donation_data["id"], defaults={}, create_defaults=default_data ) # forgot why i didn't use else, but didn't for a reason. 
@@ -643,15 +653,12 @@ async def handle_new_donations( pot = await Pot.objects.aget(id=receiverId) default_data["pot"] = pot donation, created = await Donation.objects.aupdate_or_create( - on_chain_id=donation_data["id"], - defaults={}, - create_defaults=default_data + on_chain_id=donation_data["id"], defaults={}, create_defaults=default_data ) - logger.info(f"Backfilling data? {created}") - if created: # only do updates if donation object was created + if created: # only do updates if donation object was created if actionName != "direct": @@ -683,7 +690,9 @@ async def handle_new_donations( # accountUpdate = {} else: - potUpdate["public_donations_count"] = (pot.public_donations_count or 0) + 1 + potUpdate["public_donations_count"] = ( + pot.public_donations_count or 0 + ) + 1 await Pot.objects.filter(id=receiverId).aupdate(**potUpdate) @@ -726,14 +735,20 @@ async def handle_new_donations( ) -async def cache_block_height(key: str, height: int, block_count: int, block_timestamp: int) -> int: +async def cache_block_height( + key: str, height: int, block_count: int, block_timestamp: int +) -> int: await cache.aset(key, height) # the cache os the default go to for the restart block, the db is a backup if the redis server crashes. if (block_count % int(settings.BLOCK_SAVE_HEIGHT or 400)) == 0: logger.info(f"saving daylight, {height}") await BlockHeight.objects.aupdate_or_create( id=1, - defaults={"block_height": height, "block_timestamp": datetime.fromtimestamp(block_timestamp / 1000000000), "updated_at": timezone.now()} + defaults={ + "block_height": height, + "block_timestamp": datetime.fromtimestamp(block_timestamp / 1000000000), + "updated_at": timezone.now(), + }, ) # better than ovverriding model's save method to get a singleton? we need only one entry return height From 9550888967639250c8c429b2ea45cfc5a5bc941b Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Fri, 3 May 2024 15:12:19 -0400 Subject: [PATCH 037/127] add more debug logs --- indexer_app/utils.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/indexer_app/utils.py b/indexer_app/utils.py index 3f12910..8aafe56 100644 --- a/indexer_app/utils.py +++ b/indexer_app/utils.py @@ -595,10 +595,11 @@ async def handle_new_donations( logger.info("fetching historical price...") endpoint = f"{GECKO_URL}/coins/{donation_data.get('ft_id', 'near')}/history?date={format_date(donated_at)}&localization=false" response = requests.get(endpoint) + logger.debug(f"response: {response}") price_data = response.json() except Exception as e: logger.warning(f"Failed to fetch price data: {e}") - # logger.debug(f"price data: {price_data}") + logger.debug(f"price data: {price_data}") unit_price = price_data.get("market_data", {}).get("current_price", {}).get("usd") logger.debug(f"unit price: {unit_price}") if unit_price: From 567292bebfed8ce886ac1309062960aaa14faae6 Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Fri, 3 May 2024 15:17:33 -0400 Subject: [PATCH 038/127] change debug to info --- indexer_app/utils.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/indexer_app/utils.py b/indexer_app/utils.py index 8aafe56..aab1ead 100644 --- a/indexer_app/utils.py +++ b/indexer_app/utils.py @@ -593,15 +593,17 @@ async def handle_new_donations( # late_p = await token.get_most_recent_price() try: logger.info("fetching historical price...") + logger.info("donated at: {donated_at}") endpoint = 
f"{GECKO_URL}/coins/{donation_data.get('ft_id', 'near')}/history?date={format_date(donated_at)}&localization=false" + logger.info(f"endpoint: {endpoint}") response = requests.get(endpoint) - logger.debug(f"response: {response}") + logger.info(f"response: {response}") price_data = response.json() except Exception as e: logger.warning(f"Failed to fetch price data: {e}") - logger.debug(f"price data: {price_data}") + logger.info(f"price data: {price_data}") unit_price = price_data.get("market_data", {}).get("current_price", {}).get("usd") - logger.debug(f"unit price: {unit_price}") + logger.info(f"unit price: {unit_price}") if unit_price: try: await TokenHistoricalPrice.objects.acreate( # need to change token model to use token as id From dce6e72ad524426b46551196434b9ae2a5e7a520 Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Fri, 3 May 2024 15:23:28 -0400 Subject: [PATCH 039/127] fix typo --- indexer_app/utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/indexer_app/utils.py b/indexer_app/utils.py index aab1ead..e38582f 100644 --- a/indexer_app/utils.py +++ b/indexer_app/utils.py @@ -593,7 +593,7 @@ async def handle_new_donations( # late_p = await token.get_most_recent_price() try: logger.info("fetching historical price...") - logger.info("donated at: {donated_at}") + logger.info(f"donated at: {donated_at}") endpoint = f"{GECKO_URL}/coins/{donation_data.get('ft_id', 'near')}/history?date={format_date(donated_at)}&localization=false" logger.info(f"endpoint: {endpoint}") response = requests.get(endpoint) From 680d5a0d0da63447b4ff2c005471df62efa92d4d Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Fri, 3 May 2024 15:27:35 -0400 Subject: [PATCH 040/127] use logger.debug --- indexer_app/utils.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/indexer_app/utils.py b/indexer_app/utils.py index e38582f..eabe4a7 100644 --- a/indexer_app/utils.py +++ b/indexer_app/utils.py @@ -593,17 +593,17 @@ async def handle_new_donations( # late_p = await token.get_most_recent_price() try: logger.info("fetching historical price...") - logger.info(f"donated at: {donated_at}") + logger.debug(f"donated at: {donated_at}") endpoint = f"{GECKO_URL}/coins/{donation_data.get('ft_id', 'near')}/history?date={format_date(donated_at)}&localization=false" - logger.info(f"endpoint: {endpoint}") + logger.debug(f"endpoint: {endpoint}") response = requests.get(endpoint) - logger.info(f"response: {response}") + logger.debug(f"response: {response}") price_data = response.json() except Exception as e: logger.warning(f"Failed to fetch price data: {e}") - logger.info(f"price data: {price_data}") + logger.debug(f"price data: {price_data}") unit_price = price_data.get("market_data", {}).get("current_price", {}).get("usd") - logger.info(f"unit price: {unit_price}") + logger.debug(f"unit price: {unit_price}") if unit_price: try: await TokenHistoricalPrice.objects.acreate( # need to change token model to use token as id From 6ce7d63c2f36568d420614aa42685702605b944a Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Fri, 3 May 2024 15:36:02 -0400 Subject: [PATCH 041/127] revert to logger.info --- indexer_app/utils.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/indexer_app/utils.py b/indexer_app/utils.py index eabe4a7..e38582f 100644 --- a/indexer_app/utils.py +++ b/indexer_app/utils.py @@ -593,17 +593,17 @@ async 
def handle_new_donations( # late_p = await token.get_most_recent_price() try: logger.info("fetching historical price...") - logger.debug(f"donated at: {donated_at}") + logger.info(f"donated at: {donated_at}") endpoint = f"{GECKO_URL}/coins/{donation_data.get('ft_id', 'near')}/history?date={format_date(donated_at)}&localization=false" - logger.debug(f"endpoint: {endpoint}") + logger.info(f"endpoint: {endpoint}") response = requests.get(endpoint) - logger.debug(f"response: {response}") + logger.info(f"response: {response}") price_data = response.json() except Exception as e: logger.warning(f"Failed to fetch price data: {e}") - logger.debug(f"price data: {price_data}") + logger.info(f"price data: {price_data}") unit_price = price_data.get("market_data", {}).get("current_price", {}).get("usd") - logger.debug(f"unit price: {unit_price}") + logger.info(f"unit price: {unit_price}") if unit_price: try: await TokenHistoricalPrice.objects.acreate( # need to change token model to use token as id From e13d46932df47b2a715b75c77e29b0d007501b14 Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Fri, 3 May 2024 16:13:01 -0400 Subject: [PATCH 042/127] fix coingecko api error handling --- indexer_app/utils.py | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/indexer_app/utils.py b/indexer_app/utils.py index e38582f..aa57c8d 100644 --- a/indexer_app/utils.py +++ b/indexer_app/utils.py @@ -598,6 +598,8 @@ async def handle_new_donations( logger.info(f"endpoint: {endpoint}") response = requests.get(endpoint) logger.info(f"response: {response}") + if response.status_code == 429: + logger.warning("Coingecko rate limit exceeded") price_data = response.json() except Exception as e: logger.warning(f"Failed to fetch price data: {e}") @@ -621,13 +623,13 @@ async def handle_new_donations( total_amount = donation_data["total_amount"] net_amount = net_amount - int(donation_data.get("referrer_fee") or 0) - # Calculate USD amounts - totalnearAmount = format_to_near(total_amount) - netnearAmount = format_to_near(net_amount) - total_amount_usd = unit_price * totalnearAmount - net_amount_usd = unit_price * netnearAmount + # Calculate and format amounts + total_near_amount = format_to_near(total_amount) + net_near_amount = format_to_near(net_amount) + total_amount_usd = None if not unit_price else unit_price * total_near_amount + net_amount_usd = None if not unit_price else unit_price * net_near_amount - logger.info(f"inserting donations... by {actionName}, {total_amount_usd}") + logger.info(f"inserting donations... by {actionName}") default_data = { "donor": donor, "total_amount": total_amount, @@ -644,6 +646,7 @@ async def handle_new_donations( "referrer_fee": donation_data.get("referrer_fee"), "tx_hash": receipt_obj.receipt_id, } + logger.info(f"default donation data: {default_data}") created = False if actionName == "direct": donation, created = await Donation.objects.aupdate_or_create( From 34b86d59cc5ed3503f4b692782951015de86fea4 Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Fri, 3 May 2024 16:14:48 -0400 Subject: [PATCH 043/127] add error test --- indexer_app/utils.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/indexer_app/utils.py b/indexer_app/utils.py index aa57c8d..fee4965 100644 --- a/indexer_app/utils.py +++ b/indexer_app/utils.py @@ -664,6 +664,8 @@ async def handle_new_donations( logger.info(f"Backfilling data? 
{created}") + logger.error(f"ERROR test") + if created: # only do updates if donation object was created if actionName != "direct": From e54861a1fe15e52efe0da6fa57614b8c4ad1b9e6 Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Fri, 3 May 2024 16:19:21 -0400 Subject: [PATCH 044/127] remove ERROR test --- indexer_app/utils.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/indexer_app/utils.py b/indexer_app/utils.py index fee4965..aa57c8d 100644 --- a/indexer_app/utils.py +++ b/indexer_app/utils.py @@ -664,8 +664,6 @@ async def handle_new_donations( logger.info(f"Backfilling data? {created}") - logger.error(f"ERROR test") - if created: # only do updates if donation object was created if actionName != "direct": From c8583c179848f5672f5299c0bd309a74348cac31 Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Fri, 3 May 2024 16:55:50 -0400 Subject: [PATCH 045/127] save correct timestamp on TokenHistoricalPrice record --- base/utils.py | 11 ++++++----- indexer_app/utils.py | 1 + 2 files changed, 7 insertions(+), 5 deletions(-) diff --git a/base/utils.py b/base/utils.py index 64bfb03..4c443a1 100644 --- a/base/utils.py +++ b/base/utils.py @@ -12,14 +12,15 @@ def format_to_near(yocto_amount: str): near_amount = int(yocto_amount) / (10**24) return near_amount + def convert_ns_to_utc(ns_timestamp): # Convert nanoseconds to seconds (float) seconds = ns_timestamp / 1e9 - + # Create a datetime object from the seconds (UTC) utc_datetime = datetime.utcfromtimestamp(seconds) - + # Format the datetime object as a string - formatted_date = utc_datetime.strftime('%Y-%m-%d %H:%M:%S') - - return formatted_date \ No newline at end of file + formatted_date = utc_datetime.strftime("%Y-%m-%d %H:%M:%S") + + return formatted_date diff --git a/indexer_app/utils.py b/indexer_app/utils.py index aa57c8d..32328fd 100644 --- a/indexer_app/utils.py +++ b/indexer_app/utils.py @@ -611,6 +611,7 @@ async def handle_new_donations( await TokenHistoricalPrice.objects.acreate( # need to change token model to use token as id token=token, price_usd=unit_price, + timestamp=donated_at, ) except Exception as e: logger.warning( From 03559c75f16fdcd0d521300a9aa7c4ece11194eb Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Fri, 3 May 2024 16:56:15 -0400 Subject: [PATCH 046/127] get start block from cache --- indexer_app/tasks.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/indexer_app/tasks.py b/indexer_app/tasks.py index 9e23a2d..deb440b 100644 --- a/indexer_app/tasks.py +++ b/indexer_app/tasks.py @@ -50,8 +50,8 @@ def listen_to_near_events(): try: # Update below with desired network & block height - # start_block = get_block_height('current_block_height') - start_block = 104_858_762 # manually setting for debugging TODO: remove this + start_block = get_block_height("current_block_height") + # start_block = 104_858_762 # manually setting for debugging TODO: remove this logger.info(f"what's the start block, pray tell? 
{start_block-1}") loop.run_until_complete(indexer("mainnet", start_block - 1, None)) finally: From a226c70abd2bba9ba5c17b0d347f6a6f69c161fc Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Mon, 6 May 2024 09:29:57 -0400 Subject: [PATCH 047/127] fix None to decimal conversion bug --- indexer_app/utils.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/indexer_app/utils.py b/indexer_app/utils.py index 32328fd..90621d6 100644 --- a/indexer_app/utils.py +++ b/indexer_app/utils.py @@ -665,6 +665,12 @@ async def handle_new_donations( logger.info(f"Backfilling data? {created}") + # convert total_amount_usd and net_amount_usd from None + if total_amount_usd is None: + total_amount_usd = 0.0 + if net_amount_usd is None: + net_amount_usd = 0.0 + if created: # only do updates if donation object was created if actionName != "direct": @@ -705,7 +711,7 @@ async def handle_new_donations( # donation_recipient = donation_data.get('project_id', donation_data['recipient_id']) logger.info( - f"update totl donated for {donor.id}, {donor.total_donations_out_usd + decimal.Decimal(total_amount_usd)}" + f"update total donated for {donor.id}, {donor.total_donations_out_usd + decimal.Decimal(total_amount_usd)}" ) await Account.objects.filter(id=donor.id).aupdate( **{ From a75da815da993b3d22a60c535349b0060ed511e2 Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Mon, 6 May 2024 09:44:50 -0400 Subject: [PATCH 048/127] add action_result & type unique_together constraint on Activity --- .../0003_alter_activity_unique_together.py | 17 +++++++++ activities/models.py | 2 + indexer_app/tasks.py | 4 +- indexer_app/utils.py | 38 +++++++++++-------- 4 files changed, 43 insertions(+), 18 deletions(-) create mode 100644 activities/migrations/0003_alter_activity_unique_together.py diff --git a/activities/migrations/0003_alter_activity_unique_together.py b/activities/migrations/0003_alter_activity_unique_together.py new file mode 100644 index 0000000..e68f410 --- /dev/null +++ b/activities/migrations/0003_alter_activity_unique_together.py @@ -0,0 +1,17 @@ +# Generated by Django 5.0.4 on 2024-05-06 13:39 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ("activities", "0002_alter_activity_options"), + ] + + operations = [ + migrations.AlterUniqueTogether( + name="activity", + unique_together={("action_result", "type")}, + ), + ] diff --git a/activities/models.py b/activities/models.py index 62e31e8..b1bab6d 100644 --- a/activities/models.py +++ b/activities/models.py @@ -66,3 +66,5 @@ class Activity(models.Model): class Meta: verbose_name_plural = "Activities" + + unique_together = (("action_result", "type"),) diff --git a/indexer_app/tasks.py b/indexer_app/tasks.py index deb440b..f5a776d 100644 --- a/indexer_app/tasks.py +++ b/indexer_app/tasks.py @@ -50,8 +50,8 @@ def listen_to_near_events(): try: # Update below with desired network & block height - start_block = get_block_height("current_block_height") - # start_block = 104_858_762 # manually setting for debugging TODO: remove this + # start_block = get_block_height("current_block_height") + start_block = 105_112_120 # manually setting for reindexing TODO: remove this logger.info(f"what's the start block, pray tell? 
{start_block-1}") loop.run_until_complete(indexer("mainnet", start_block - 1, None)) finally: diff --git a/indexer_app/utils.py b/indexer_app/utils.py index 90621d6..8b20587 100644 --- a/indexer_app/utils.py +++ b/indexer_app/utils.py @@ -729,23 +729,29 @@ async def handle_new_donations( } await Account.objects.filter(id=recipient.id).aupdate(**acctUpdate) - # Insert activity record - await Activity.objects.acreate( - signer_id=signerId, - receiver_id=receiverId, - timestamp=donation.donated_at, - type=( - "Donate_Direct" - if actionName == "direct" - else ( - "Donate_Pot_Matching_Pool" - if donation.matching_pool - else "Donate_Pot_Public" - ) - ), - action_result=donation_data, - tx_hash=receipt_obj.receipt_id, + # Insert or update activity record + activity_type = ( + "Donate_Direct" + if actionName == "direct" + else ( + "Donate_Pot_Matching_Pool" + if donation.matching_pool + else "Donate_Pot_Public" + ) + ) + defaults = { + "signer_id": signerId, + "receiver_id": receiverId, + "timestamp": donation.donated_at, + "tx_hash": receipt_obj.receipt_id, + } + activity, created = Activity.objects.aupdate_or_create( + action_result=donation_data, type=activity_type, defaults=defaults ) + if created: + logger.info(f"Activity created: {activity}") + else: + logger.info(f"Activity updated: {activity}") async def cache_block_height( From da4ca3777700411f617123facc2a7279451db75f Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Mon, 6 May 2024 09:54:59 -0400 Subject: [PATCH 049/127] create or update activity even if donation already exists --- indexer_app/utils.py | 64 ++++++++++++++++++++------------------------ 1 file changed, 29 insertions(+), 35 deletions(-) diff --git a/indexer_app/utils.py b/indexer_app/utils.py index 8b20587..3fc5aca 100644 --- a/indexer_app/utils.py +++ b/indexer_app/utils.py @@ -648,22 +648,16 @@ async def handle_new_donations( "tx_hash": receipt_obj.receipt_id, } logger.info(f"default donation data: {default_data}") - created = False - if actionName == "direct": - donation, created = await Donation.objects.aupdate_or_create( - on_chain_id=donation_data["id"], defaults={}, create_defaults=default_data - ) - # forgot why i didn't use else, but didn't for a reason. if actionName != "direct": logger.info("selecting pot to make public donation update") pot = await Pot.objects.aget(id=receiverId) default_data["pot"] = pot - donation, created = await Donation.objects.aupdate_or_create( - on_chain_id=donation_data["id"], defaults={}, create_defaults=default_data - ) - logger.info(f"Backfilling data? {created}") + donation, donation_created = await Donation.objects.aupdate_or_create( + on_chain_id=donation_data["id"], defaults={}, create_defaults=default_data + ) + logger.info(f"Created donation? 
{donation_created}") # convert total_amount_usd and net_amount_usd from None if total_amount_usd is None: @@ -671,7 +665,31 @@ async def handle_new_donations( if net_amount_usd is None: net_amount_usd = 0.0 - if created: # only do updates if donation object was created + # Insert or update activity record + activity_type = ( + "Donate_Direct" + if actionName == "direct" + else ( + "Donate_Pot_Matching_Pool" + if donation.matching_pool + else "Donate_Pot_Public" + ) + ) + defaults = { + "signer_id": signerId, + "receiver_id": receiverId, + "timestamp": donation.donated_at, + "tx_hash": receipt_obj.receipt_id, + } + activity, activity_created = Activity.objects.aupdate_or_create( + action_result=donation_data, type=activity_type, defaults=defaults + ) + if activity_created: + logger.info(f"Activity created: {activity}") + else: + logger.info(f"Activity updated: {activity}") + + if donation_created: # only do stats updates if donation object was created if actionName != "direct": @@ -729,30 +747,6 @@ async def handle_new_donations( } await Account.objects.filter(id=recipient.id).aupdate(**acctUpdate) - # Insert or update activity record - activity_type = ( - "Donate_Direct" - if actionName == "direct" - else ( - "Donate_Pot_Matching_Pool" - if donation.matching_pool - else "Donate_Pot_Public" - ) - ) - defaults = { - "signer_id": signerId, - "receiver_id": receiverId, - "timestamp": donation.donated_at, - "tx_hash": receipt_obj.receipt_id, - } - activity, created = Activity.objects.aupdate_or_create( - action_result=donation_data, type=activity_type, defaults=defaults - ) - if created: - logger.info(f"Activity created: {activity}") - else: - logger.info(f"Activity updated: {activity}") - async def cache_block_height( key: str, height: int, block_count: int, block_timestamp: int From 077090dc6286040393d20647f6148a351066ccbb Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Mon, 6 May 2024 10:04:32 -0400 Subject: [PATCH 050/127] await aupdate_or_create Activity call --- indexer_app/utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/indexer_app/utils.py b/indexer_app/utils.py index 3fc5aca..cc78781 100644 --- a/indexer_app/utils.py +++ b/indexer_app/utils.py @@ -681,7 +681,7 @@ async def handle_new_donations( "timestamp": donation.donated_at, "tx_hash": receipt_obj.receipt_id, } - activity, activity_created = Activity.objects.aupdate_or_create( + activity, activity_created = await Activity.objects.aupdate_or_create( action_result=donation_data, type=activity_type, defaults=defaults ) if activity_created: From c4afcf1579de32c42b7c5c21fbe5df610b27a46a Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Mon, 6 May 2024 10:07:47 -0400 Subject: [PATCH 051/127] remove manual start block setting --- indexer_app/handler.py | 54 +++++++++++++++++++++++++++--------------- indexer_app/tasks.py | 4 ++-- 2 files changed, 37 insertions(+), 21 deletions(-) diff --git a/indexer_app/handler.py b/indexer_app/handler.py index 45d5b1a..a3858fe 100644 --- a/indexer_app/handler.py +++ b/indexer_app/handler.py @@ -35,7 +35,9 @@ async def handle_streamer_message(streamer_message: near_primitives.StreamerMess "block_height", block_height ) # TODO: add custom timeout if it should be valid for longer than default (5 minutes) formatted_date = convert_ns_to_utc(block_timestamp) - logger.info(f"Block Height: {block_height}, Block Timestamp: {block_timestamp} ({formatted_date})") + logger.info( + f"Block 
Height: {block_height}, Block Timestamp: {block_timestamp} ({formatted_date})" + ) # if block_height == 111867204: # with open("indexer_outcome2.json", "w") as file: # file.write(f"{streamer_message}") @@ -108,11 +110,15 @@ async def handle_streamer_message(streamer_message: near_primitives.StreamerMess args_dict = json.loads(decoded_text) except UnicodeDecodeError: # Handle case where the byte sequence cannot be decoded to UTF-8 - logger.warning(f"Cannot decode args to UTF-8 text: {decoded_bytes}") + logger.warning( + f"Cannot decode args to UTF-8 text: {decoded_bytes}" + ) args_dict = {} except json.JSONDecodeError: # Handle case where the text cannot be parsed as JSON - logger.warning(f"Decoded text is not valid JSON: {decoded_text}") + logger.warning( + f"Decoded text is not valid JSON: {decoded_text}" + ) args_dict = {} match method_name: @@ -123,7 +129,9 @@ async def handle_streamer_message(streamer_message: near_primitives.StreamerMess args_dict, receiver_id, created_at ) else: - logger.info(f"new pot deployment: {args_dict}, {action}") + logger.info( + f"new pot deployment: {args_dict}, {action}" + ) await handle_new_pot( args_dict, receiver_id, @@ -135,7 +143,9 @@ async def handle_streamer_message(streamer_message: near_primitives.StreamerMess break case "assert_can_apply_callback": - logger.info(f"application case: {args_dict}, {action}, {receipt}") + logger.info( + f"application case: {args_dict}, {action}, {receipt}" + ) await handle_pot_application( args_dict, receiver_id, @@ -147,7 +157,9 @@ async def handle_streamer_message(streamer_message: near_primitives.StreamerMess break case "apply": - logger.info(f"application case 2: {args_dict}, {action}, {receipt}") + logger.info( + f"application case 2: {args_dict}, {action}, {receipt}" + ) await handle_pot_application( args_dict, receiver_id, @@ -159,14 +171,12 @@ async def handle_streamer_message(streamer_message: near_primitives.StreamerMess break case "donate": # TODO: donate that produces result - logger.info(f"switching bazooka to knifee works!! donate his blood: {args_dict}, {receipt}, {action}, {log_data}") + logger.info( + f"switching bazooka to knifee works!! 
donate his blood: {args_dict}, {receipt}, {action}, {log_data}" + ) if len(log_data) > 1: await handle_batch_donations( - receiver_id, - signer_id, - "direct", - receipt, - log_data + receiver_id, signer_id, "direct", receipt, log_data ) else: await handle_new_donations( @@ -181,7 +191,9 @@ async def handle_streamer_message(streamer_message: near_primitives.StreamerMess break case "handle_protocol_fee_callback": - logger.info(f"donations to pool incoming: {args_dict}, {receipt}, {receipt_execution_outcome}") + logger.info( + f"donations to pool incoming: {args_dict}, {receipt}, {receipt_execution_outcome}" + ) await handle_new_donations( args_dict, receiver_id, @@ -194,7 +206,9 @@ async def handle_streamer_message(streamer_message: near_primitives.StreamerMess break case "transfer_funds_callback": - logger.info(f"new version donations to pool incoming: {args_dict}, {action}") + logger.info( + f"new version donations to pool incoming: {args_dict}, {action}" + ) await handle_new_donations( args_dict, receiver_id, @@ -209,7 +223,9 @@ async def handle_streamer_message(streamer_message: near_primitives.StreamerMess case ( "register_batch" ): # TODO: listen for create_registration event instead of method call - logger.info(f"registrations incoming: {args_dict}, {action}") + logger.info( + f"registrations incoming: {args_dict}, {action}" + ) if receiver_id != "lists.potlock.near": break await handle_new_list_registration( @@ -218,7 +234,9 @@ async def handle_streamer_message(streamer_message: near_primitives.StreamerMess break case "chef_set_application_status": - logger.info(f"application status change incoming: {args_dict}") + logger.info( + f"application status change incoming: {args_dict}" + ) await handle_pot_application_status_change( args_dict, receiver_id, signer_id, receipt, status_obj ) @@ -292,8 +310,6 @@ async def handle_streamer_message(streamer_message: near_primitives.StreamerMess # TODO: handle remove upvote except Exception as e: - logger.warning( - f"Error during parsing method call from JSON string to dict\n{e}" - ) + logger.error(f"Error in indexer handler:\n{e}") # with open("indexer_error.txt", "a") as file: # file.write(f"{e}\n") diff --git a/indexer_app/tasks.py b/indexer_app/tasks.py index f5a776d..c872e3a 100644 --- a/indexer_app/tasks.py +++ b/indexer_app/tasks.py @@ -50,8 +50,8 @@ def listen_to_near_events(): try: # Update below with desired network & block height - # start_block = get_block_height("current_block_height") - start_block = 105_112_120 # manually setting for reindexing TODO: remove this + start_block = get_block_height("current_block_height") + # start_block = 105_112_120 # manually setting for reindexing TODO: remove this logger.info(f"what's the start block, pray tell? 
{start_block-1}") loop.run_until_complete(indexer("mainnet", start_block - 1, None)) finally: From 739df7814e45623c790d32b543797ccbb41d5dab Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Mon, 6 May 2024 10:19:40 -0400 Subject: [PATCH 052/127] restart indexer at 105_159_364 --- indexer_app/tasks.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/indexer_app/tasks.py b/indexer_app/tasks.py index c872e3a..c01a132 100644 --- a/indexer_app/tasks.py +++ b/indexer_app/tasks.py @@ -50,8 +50,8 @@ def listen_to_near_events(): try: # Update below with desired network & block height - start_block = get_block_height("current_block_height") - # start_block = 105_112_120 # manually setting for reindexing TODO: remove this + # start_block = get_block_height("current_block_height") + start_block = 105_159_364 # manually setting for reindexing TODO: remove this logger.info(f"what's the start block, pray tell? {start_block-1}") loop.run_until_complete(indexer("mainnet", start_block - 1, None)) finally: From 9ecc0029985193b4becac994ff6de67d682b2f87 Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Mon, 6 May 2024 10:49:23 -0400 Subject: [PATCH 053/127] restart indexer at 105,363,093 --- indexer_app/tasks.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/indexer_app/tasks.py b/indexer_app/tasks.py index c01a132..cbed4a3 100644 --- a/indexer_app/tasks.py +++ b/indexer_app/tasks.py @@ -51,7 +51,7 @@ def listen_to_near_events(): try: # Update below with desired network & block height # start_block = get_block_height("current_block_height") - start_block = 105_159_364 # manually setting for reindexing TODO: remove this + start_block = 105_363_093 # manually setting for reindexing TODO: remove this logger.info(f"what's the start block, pray tell? {start_block-1}") loop.run_until_complete(indexer("mainnet", start_block - 1, None)) finally: From 1fb42f7590f384206fa088db508f855f0688e5cb Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Mon, 6 May 2024 10:55:13 -0400 Subject: [PATCH 054/127] restart indexer at 105534694 --- indexer_app/tasks.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/indexer_app/tasks.py b/indexer_app/tasks.py index cbed4a3..42f6389 100644 --- a/indexer_app/tasks.py +++ b/indexer_app/tasks.py @@ -51,7 +51,7 @@ def listen_to_near_events(): try: # Update below with desired network & block height # start_block = get_block_height("current_block_height") - start_block = 105_363_093 # manually setting for reindexing TODO: remove this + start_block = 105_534_694 # manually setting for reindexing TODO: remove this logger.info(f"what's the start block, pray tell? 
{start_block-1}") loop.run_until_complete(indexer("mainnet", start_block - 1, None)) finally: From 0a2de4bd378c9a3926b6546eaff7f55c360732c0 Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Mon, 6 May 2024 10:56:05 -0400 Subject: [PATCH 055/127] restart indexer from current block height --- indexer_app/tasks.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/indexer_app/tasks.py b/indexer_app/tasks.py index 42f6389..3a2b859 100644 --- a/indexer_app/tasks.py +++ b/indexer_app/tasks.py @@ -50,8 +50,8 @@ def listen_to_near_events(): try: # Update below with desired network & block height - # start_block = get_block_height("current_block_height") - start_block = 105_534_694 # manually setting for reindexing TODO: remove this + start_block = get_block_height("current_block_height") + # start_block = 105_534_694 # manually setting for reindexing TODO: remove this logger.info(f"what's the start block, pray tell? {start_block-1}") loop.run_until_complete(indexer("mainnet", start_block - 1, None)) finally: From 649e9dcc687c7886608d73120d6c4e666d3d6907 Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Mon, 6 May 2024 15:14:40 -0400 Subject: [PATCH 056/127] add celery recurring task for account totals --- base/celery.py | 20 ++++++++++++++------ donations/models.py | 1 + indexer_app/tasks.py | 45 +++++++++++++++++++++++++++++++++++++++++++- 3 files changed, 59 insertions(+), 7 deletions(-) diff --git a/base/celery.py b/base/celery.py index ad77bb6..07d6000 100644 --- a/base/celery.py +++ b/base/celery.py @@ -1,15 +1,16 @@ import os - -from django.conf import settings +import ssl from celery import Celery -import ssl +from celery.schedules import crontab +from django.conf import settings os.environ.setdefault("DJANGO_SETTINGS_MODULE", "base.settings") -app = Celery("indexer", -# broker=settings.CELERY_BROKER_URL, -# backend=settings.CELERY_RESULT_BACKEND +app = Celery( + "indexer", + # broker=settings.CELERY_BROKER_URL, + # backend=settings.CELERY_RESULT_BACKEND ) # SSL configurations for broker and backend @@ -22,3 +23,10 @@ app.config_from_object("django.conf:settings", namespace="CELERY") app.autodiscover_tasks() + +app.conf.beat_schedule = { + "update_account_statistics_every_5_minutes": { + "task": "indexer_app.tasks.update_account_statistics", + "schedule": crontab(minute="*/5"), # Executes every 5 minutes + }, +} diff --git a/donations/models.py b/donations/models.py index 64e1500..355b5f6 100644 --- a/donations/models.py +++ b/donations/models.py @@ -92,6 +92,7 @@ class Donation(models.Model): related_name="received_donations", null=True, help_text=_("Donation recipient."), + db_index=True, ) protocol_fee = models.CharField( _("protocol fee"), diff --git a/indexer_app/tasks.py b/indexer_app/tasks.py index 3a2b859..a2a8a26 100644 --- a/indexer_app/tasks.py +++ b/indexer_app/tasks.py @@ -2,10 +2,15 @@ from pathlib import Path from celery import shared_task +from celery.signals import task_revoked from django.conf import settings +from django.db.models import Count, Sum from near_lake_framework import LakeConfig, streamer +from accounts.models import Account +from donations.models import Donation from indexer_app.handler import handle_streamer_message +from pots.models import PotPayout from .logging import logger from .utils import cache_block_height, get_block_height @@ -58,7 +63,45 @@ def listen_to_near_events(): loop.close() -from celery.signals import task_revoked 
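+# NB: aggregate() returns a dict keyed "<field>__<func>" (e.g. {"donor__count": 3}),
+# so the value should be indexed out before assigning it to a model field. A
+# minimal sketch, using only names already imported in this module:
+#
+#   stats = Donation.objects.filter(recipient=account).aggregate(
+#       Count("donor", distinct=True)
+#   )
+#   account.donors_count = stats["donor__count"]
+#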
+@shared_task +def update_account_statistics(): + # Logic to update account statistics + print("Updating account statistics...") + for account in Account.objects.all(): + # donors count + account.donors_count = Donation.objects.filter(recipient=account).aggregate( + Count("donor", distinct=True) + ) + # donations received usd + account.total_donations_in_usd = ( + Donation.objects.filter(recipient=account).aggregate( + Sum("total_amount_usd") + )["total_amount_usd__sum"] + or 0 + ) + # donations sent usd + account.total_donations_out_usd = ( + Donation.objects.filter(donor=account).aggregate(Sum("total_amount_usd"))[ + "total_amount_usd__sum" + ] + or 0 + ) + # matching pool allocations usd + account.total_matching_pool_allocations_usd = ( + PotPayout.objects.filter( + recipient=account, paid_at__isnull=False + ).aggregate(Sum("amount_usd"))["amount_usd__sum"] + or 0 + ) + # save changes + account.save( + update_fields=[ + "donors_count", + "total_donations_in_usd", + "total_donations_out_usd", + "total_matching_pool_allocations_usd", + ] + ) @task_revoked.connect From 51cb8287ad62e08ed31b28b5c74c08326cebe1b2 Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Mon, 6 May 2024 15:27:31 -0400 Subject: [PATCH 057/127] update after_install to include celery beat service --- scripts/after_install.sh | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/scripts/after_install.sh b/scripts/after_install.sh index 0bdcdf0..abb4676 100755 --- a/scripts/after_install.sh +++ b/scripts/after_install.sh @@ -50,17 +50,17 @@ PENDING_MIGRATIONS=$(python manage.py showmigrations | grep "\[ \]" | wc -l) # if [ "$PENDING_MIGRATIONS" -gt 0 ]; then echo "Migrations found; stopping services..." >> "$LOG_FILE" - sudo systemctl stop gunicorn celery + sudo systemctl stop gunicorn celery-worker celery-beat echo 'Applying migrations...' >> "$LOG_FILE" python manage.py migrate >> "$LOG_FILE" 2>&1 echo 'Starting services...' >> "$LOG_FILE" - sudo systemctl start gunicorn celery + sudo systemctl start gunicorn celery-worker celery-beat else echo 'No migrations found. Running collectstatic and restarting services...' 
>> "$LOG_FILE" python manage.py collectstatic --noinput >> "$LOG_FILE" 2>&1 - sudo systemctl restart gunicorn celery + sudo systemctl restart gunicorn celery-worker celery-beat fi echo "$(date '+%Y-%m-%d %H:%M:%S') - after_install.sh completed" >> "$LOG_FILE" From f6883e451c0b32135ac1f2b6dc1f15cb4f384833 Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Mon, 6 May 2024 15:47:37 -0400 Subject: [PATCH 058/127] use logger in update_account_statistics --- indexer_app/tasks.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/indexer_app/tasks.py b/indexer_app/tasks.py index a2a8a26..08d6f77 100644 --- a/indexer_app/tasks.py +++ b/indexer_app/tasks.py @@ -66,7 +66,7 @@ def listen_to_near_events(): @shared_task def update_account_statistics(): # Logic to update account statistics - print("Updating account statistics...") + logger.info("Updating account statistics...") for account in Account.objects.all(): # donors count account.donors_count = Donation.objects.filter(recipient=account).aggregate( @@ -102,6 +102,7 @@ def update_account_statistics(): "total_matching_pool_allocations_usd", ] ) + logger.info(f"Account {account.id} statistics updated.") @task_revoked.connect From 39dd68d88b84de76a67c4791781d5c650549842e Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Mon, 6 May 2024 15:56:29 -0400 Subject: [PATCH 059/127] add jobs logger --- base/settings.py | 6 ++++++ indexer_app/tasks.py | 3 +++ 2 files changed, 9 insertions(+) diff --git a/base/settings.py b/base/settings.py index 41f5753..6ba4997 100644 --- a/base/settings.py +++ b/base/settings.py @@ -223,6 +223,11 @@ "level": log_level, "propagate": False, }, + "jobs": { + "handlers": ["console"], + "level": log_level, + "propagate": False, + }, "": {"handlers": ["console"], "level": log_level}, # root logger }, } @@ -241,6 +246,7 @@ LOGGING["loggers"][""]["handlers"].append("watchtower") LOGGING["loggers"]["django"]["handlers"].append("watchtower") LOGGING["loggers"]["indexer"]["handlers"].append("watchtower") + LOGGING["loggers"]["jobs"]["handlers"].append("watchtower") # log_level = getattr(logging, LOG_LEVEL, logging.INFO) # print("LOG_LEVEL: ", LOG_LEVEL) diff --git a/indexer_app/tasks.py b/indexer_app/tasks.py index 08d6f77..c5883b6 100644 --- a/indexer_app/tasks.py +++ b/indexer_app/tasks.py @@ -1,4 +1,5 @@ import asyncio +import logging from pathlib import Path from celery import shared_task @@ -65,9 +66,11 @@ def listen_to_near_events(): @shared_task def update_account_statistics(): + logger = logging.getLogger("jobs") # Logic to update account statistics logger.info("Updating account statistics...") for account in Account.objects.all(): + logger.info(f"Updating statistics for account {account.id}...") # donors count account.donors_count = Donation.objects.filter(recipient=account).aggregate( Count("donor", distinct=True) From 04a46193270cdf78395ec92afdbcda99e9acb59b Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Mon, 6 May 2024 16:27:45 -0400 Subject: [PATCH 060/127] debugging account stats task --- indexer_app/tasks.py | 86 ++++++++++++++++++++++++-------------------- 1 file changed, 48 insertions(+), 38 deletions(-) diff --git a/indexer_app/tasks.py b/indexer_app/tasks.py index c5883b6..17eca58 100644 --- a/indexer_app/tasks.py +++ b/indexer_app/tasks.py @@ -5,6 +5,7 @@ from celery import shared_task from celery.signals import task_revoked from django.conf import 
settings +from django.db import transaction from django.db.models import Count, Sum from near_lake_framework import LakeConfig, streamer @@ -67,45 +68,54 @@ def listen_to_near_events(): @shared_task def update_account_statistics(): logger = logging.getLogger("jobs") - # Logic to update account statistics logger.info("Updating account statistics...") - for account in Account.objects.all(): - logger.info(f"Updating statistics for account {account.id}...") - # donors count - account.donors_count = Donation.objects.filter(recipient=account).aggregate( - Count("donor", distinct=True) - ) - # donations received usd - account.total_donations_in_usd = ( - Donation.objects.filter(recipient=account).aggregate( - Sum("total_amount_usd") - )["total_amount_usd__sum"] - or 0 - ) - # donations sent usd - account.total_donations_out_usd = ( - Donation.objects.filter(donor=account).aggregate(Sum("total_amount_usd"))[ - "total_amount_usd__sum" - ] - or 0 - ) - # matching pool allocations usd - account.total_matching_pool_allocations_usd = ( - PotPayout.objects.filter( - recipient=account, paid_at__isnull=False - ).aggregate(Sum("amount_usd"))["amount_usd__sum"] - or 0 - ) - # save changes - account.save( - update_fields=[ - "donors_count", - "total_donations_in_usd", - "total_donations_out_usd", - "total_matching_pool_allocations_usd", - ] - ) - logger.info(f"Account {account.id} statistics updated.") + + accounts = Account.objects.all() + for account in accounts: + try: + with transaction.atomic(): + logger.info(f"Updating statistics for account {account.id}...") + # donors count + account.donors_count = Donation.objects.filter( + recipient=account + ).aggregate(Count("donor", distinct=True))["donor__count"] + + # donations received usd + account.total_donations_in_usd = ( + Donation.objects.filter(recipient=account).aggregate( + Sum("total_amount_usd") + )["total_amount_usd__sum"] + or 0 + ) + + # donations sent usd + account.total_donations_out_usd = ( + Donation.objects.filter(donor=account).aggregate( + Sum("total_amount_usd") + )["total_amount_usd__sum"] + or 0 + ) + + # matching pool allocations usd + account.total_matching_pool_allocations_usd = ( + PotPayout.objects.filter( + recipient=account, paid_at__isnull=False + ).aggregate(Sum("amount_usd"))["amount_usd__sum"] + or 0 + ) + + # Save changes + account.save( + update_fields=[ + "donors_count", + "total_donations_in_usd", + "total_donations_out_usd", + "total_matching_pool_allocations_usd", + ] + ) + logger.info(f"Account {account.id} statistics updated.") + except Exception as e: + logger.error(f"Failed to update statistics for account {account.id}: {e}") @task_revoked.connect From 02f615785f071a5cfdf7ab581c8044c18058dcdf Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Mon, 6 May 2024 16:31:49 -0400 Subject: [PATCH 061/127] fix typo in account stats job --- indexer_app/tasks.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/indexer_app/tasks.py b/indexer_app/tasks.py index 17eca58..1d421a3 100644 --- a/indexer_app/tasks.py +++ b/indexer_app/tasks.py @@ -100,7 +100,7 @@ def update_account_statistics(): account.total_matching_pool_allocations_usd = ( PotPayout.objects.filter( recipient=account, paid_at__isnull=False - ).aggregate(Sum("amount_usd"))["amount_usd__sum"] + ).aggregate(Sum("amount_paid_usd"))["amount_paid_usd__sum"] or 0 ) From b1d07c492fc2d291f957f0332ea707d1c0652e2d Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: 
Mon, 6 May 2024 16:53:18 -0400 Subject: [PATCH 062/127] remove transaction.atomic in account stats task --- indexer_app/tasks.py | 82 ++++++++++++++++++++++---------------------- 1 file changed, 41 insertions(+), 41 deletions(-) diff --git a/indexer_app/tasks.py b/indexer_app/tasks.py index 1d421a3..a775b85 100644 --- a/indexer_app/tasks.py +++ b/indexer_app/tasks.py @@ -73,47 +73,47 @@ def update_account_statistics(): accounts = Account.objects.all() for account in accounts: try: - with transaction.atomic(): - logger.info(f"Updating statistics for account {account.id}...") - # donors count - account.donors_count = Donation.objects.filter( - recipient=account - ).aggregate(Count("donor", distinct=True))["donor__count"] - - # donations received usd - account.total_donations_in_usd = ( - Donation.objects.filter(recipient=account).aggregate( - Sum("total_amount_usd") - )["total_amount_usd__sum"] - or 0 - ) - - # donations sent usd - account.total_donations_out_usd = ( - Donation.objects.filter(donor=account).aggregate( - Sum("total_amount_usd") - )["total_amount_usd__sum"] - or 0 - ) - - # matching pool allocations usd - account.total_matching_pool_allocations_usd = ( - PotPayout.objects.filter( - recipient=account, paid_at__isnull=False - ).aggregate(Sum("amount_paid_usd"))["amount_paid_usd__sum"] - or 0 - ) - - # Save changes - account.save( - update_fields=[ - "donors_count", - "total_donations_in_usd", - "total_donations_out_usd", - "total_matching_pool_allocations_usd", - ] - ) - logger.info(f"Account {account.id} statistics updated.") + # with transaction.atomic(): + logger.info(f"Updating statistics for account {account.id}...") + # donors count + account.donors_count = Donation.objects.filter(recipient=account).aggregate( + Count("donor", distinct=True) + )["donor__count"] + + # donations received usd + account.total_donations_in_usd = ( + Donation.objects.filter(recipient=account).aggregate( + Sum("total_amount_usd") + )["total_amount_usd__sum"] + or 0 + ) + + # donations sent usd + account.total_donations_out_usd = ( + Donation.objects.filter(donor=account).aggregate( + Sum("total_amount_usd") + )["total_amount_usd__sum"] + or 0 + ) + + # matching pool allocations usd + account.total_matching_pool_allocations_usd = ( + PotPayout.objects.filter( + recipient=account, paid_at__isnull=False + ).aggregate(Sum("amount_paid_usd"))["amount_paid_usd__sum"] + or 0 + ) + + # Save changes + account.save( + update_fields=[ + "donors_count", + "total_donations_in_usd", + "total_donations_out_usd", + "total_matching_pool_allocations_usd", + ] + ) + logger.info(f"Account {account.id} statistics updated.") except Exception as e: logger.error(f"Failed to update statistics for account {account.id}: {e}") From d803716c101157f81a5693e04eacfd8c0dca2f96 Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Tue, 7 May 2024 08:51:36 -0400 Subject: [PATCH 063/127] update after_install and celery scripts to handle beat start/stop --- base/celery.py | 7 +++++++ scripts/after_install.sh | 6 +++--- 2 files changed, 10 insertions(+), 3 deletions(-) diff --git a/base/celery.py b/base/celery.py index 07d6000..801159f 100644 --- a/base/celery.py +++ b/base/celery.py @@ -28,5 +28,12 @@ "update_account_statistics_every_5_minutes": { "task": "indexer_app.tasks.update_account_statistics", "schedule": crontab(minute="*/5"), # Executes every 5 minutes + "options": {"queue": "beat_tasks"}, }, } + +app.conf.task_routes = { + "indexer_app.tasks.update_account_statistics": {"queue": 
"beat_tasks"}, +} + +# TODO: (pick up here): rename & restart services, using correct workers diff --git a/scripts/after_install.sh b/scripts/after_install.sh index abb4676..c847c6c 100755 --- a/scripts/after_install.sh +++ b/scripts/after_install.sh @@ -50,17 +50,17 @@ PENDING_MIGRATIONS=$(python manage.py showmigrations | grep "\[ \]" | wc -l) # if [ "$PENDING_MIGRATIONS" -gt 0 ]; then echo "Migrations found; stopping services..." >> "$LOG_FILE" - sudo systemctl stop gunicorn celery-worker celery-beat + sudo systemctl stop gunicorn celery-indexer-worker celery-beat-worker echo 'Applying migrations...' >> "$LOG_FILE" python manage.py migrate >> "$LOG_FILE" 2>&1 echo 'Starting services...' >> "$LOG_FILE" - sudo systemctl start gunicorn celery-worker celery-beat + sudo systemctl start gunicorn celery-indexer-worker celery-beat-worker else echo 'No migrations found. Running collectstatic and restarting services...' >> "$LOG_FILE" python manage.py collectstatic --noinput >> "$LOG_FILE" 2>&1 - sudo systemctl restart gunicorn celery-worker celery-beat + sudo systemctl restart gunicorn celery-indexer-worker celery-beat-worker fi echo "$(date '+%Y-%m-%d %H:%M:%S') - after_install.sh completed" >> "$LOG_FILE" From 058363518f4269ac3b35344889e508730ad5dcdc Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Tue, 7 May 2024 08:56:39 -0400 Subject: [PATCH 064/127] remove excess logs for update_account_statistics job --- indexer_app/tasks.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/indexer_app/tasks.py b/indexer_app/tasks.py index a775b85..9b019b5 100644 --- a/indexer_app/tasks.py +++ b/indexer_app/tasks.py @@ -68,13 +68,13 @@ def listen_to_near_events(): @shared_task def update_account_statistics(): logger = logging.getLogger("jobs") - logger.info("Updating account statistics...") accounts = Account.objects.all() + accounts_count = accounts.count() + logger.info(f"Updating statistics for {accounts_count} accounts...") for account in accounts: try: - # with transaction.atomic(): - logger.info(f"Updating statistics for account {account.id}...") + # logger.info(f"Updating statistics for account {account.id}...") # donors count account.donors_count = Donation.objects.filter(recipient=account).aggregate( Count("donor", distinct=True) @@ -113,9 +113,10 @@ def update_account_statistics(): "total_matching_pool_allocations_usd", ] ) - logger.info(f"Account {account.id} statistics updated.") + # logger.info(f"Account {account.id} statistics updated.") except Exception as e: logger.error(f"Failed to update statistics for account {account.id}: {e}") + logger.info(f"Account stats for {accounts.count()} accounts updated.") @task_revoked.connect From c5df8892e96784ec93c6c807e892d93b70adec8f Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Tue, 7 May 2024 10:00:16 -0400 Subject: [PATCH 065/127] add celery-beat to restart in after_install --- scripts/after_install.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/after_install.sh b/scripts/after_install.sh index c847c6c..b5acf94 100755 --- a/scripts/after_install.sh +++ b/scripts/after_install.sh @@ -56,11 +56,11 @@ if [ "$PENDING_MIGRATIONS" -gt 0 ]; then python manage.py migrate >> "$LOG_FILE" 2>&1 echo 'Starting services...' 
>> "$LOG_FILE" - sudo systemctl start gunicorn celery-indexer-worker celery-beat-worker + sudo systemctl start gunicorn celery-indexer-worker celery-beat-worker celery-beat else echo 'No migrations found. Running collectstatic and restarting services...' >> "$LOG_FILE" python manage.py collectstatic --noinput >> "$LOG_FILE" 2>&1 - sudo systemctl restart gunicorn celery-indexer-worker celery-beat-worker + sudo systemctl restart gunicorn celery-indexer-worker celery-beat-worker celery-beat fi echo "$(date '+%Y-%m-%d %H:%M:%S') - after_install.sh completed" >> "$LOG_FILE" From 2c243f66b26017a2ec2caff3058338c9b43b53ad Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Tue, 7 May 2024 10:10:55 -0400 Subject: [PATCH 066/127] remove celery-beat from after_install --- scripts/after_install.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/after_install.sh b/scripts/after_install.sh index b5acf94..c847c6c 100755 --- a/scripts/after_install.sh +++ b/scripts/after_install.sh @@ -56,11 +56,11 @@ if [ "$PENDING_MIGRATIONS" -gt 0 ]; then python manage.py migrate >> "$LOG_FILE" 2>&1 echo 'Starting services...' >> "$LOG_FILE" - sudo systemctl start gunicorn celery-indexer-worker celery-beat-worker celery-beat + sudo systemctl start gunicorn celery-indexer-worker celery-beat-worker else echo 'No migrations found. Running collectstatic and restarting services...' >> "$LOG_FILE" python manage.py collectstatic --noinput >> "$LOG_FILE" 2>&1 - sudo systemctl restart gunicorn celery-indexer-worker celery-beat-worker celery-beat + sudo systemctl restart gunicorn celery-indexer-worker celery-beat-worker fi echo "$(date '+%Y-%m-%d %H:%M:%S') - after_install.sh completed" >> "$LOG_FILE" From 048ea99977d6548b83ff8d6c9edc4ab9c7d8437a Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Tue, 7 May 2024 14:09:14 -0400 Subject: [PATCH 067/127] move fetch_usd_prices to Donation model method --- base/logging.py | 3 +- base/settings.py | 4 + donations/models.py | 99 +++++++++++++++++++- indexer_app/tasks.py | 37 ++++++-- indexer_app/utils.py | 211 ++++++++++++++++++++++--------------------- tokens/models.py | 4 + 6 files changed, 246 insertions(+), 112 deletions(-) diff --git a/base/logging.py b/base/logging.py index cda9ae6..12bef01 100644 --- a/base/logging.py +++ b/base/logging.py @@ -1,3 +1,4 @@ import logging -logger = logging.getLogger("django") \ No newline at end of file +logger = logging.getLogger("django") +jobs_logger = logging.getLogger("jobs") diff --git a/base/settings.py b/base/settings.py index 6ba4997..02a6664 100644 --- a/base/settings.py +++ b/base/settings.py @@ -53,6 +53,10 @@ BLOCK_SAVE_HEIGHT = os.environ.get("BLOCK_SAVE_HEIGHT") +COINGECKO_URL = "https://api.coingecko.com/api/v3" +# Number of hours around a given timestamp for querying historical prices +HISTORICAL_PRICE_QUERY_HOURS = 24 + # Application definition INSTALLED_APPS = [ diff --git a/donations/models.py b/donations/models.py index 355b5f6..7f39a4f 100644 --- a/donations/models.py +++ b/donations/models.py @@ -1,9 +1,15 @@ -from django import db +from datetime import timedelta + +import requests +from django.conf import settings from django.db import models from django.utils.translation import gettext_lazy as _ from accounts.models import Account +from base.logging import logger +from base.utils import format_date from pots.models import Pot +from tokens.models import Token, TokenHistoricalPrice class 
Donation(models.Model): @@ -54,7 +60,7 @@ class Donation(models.Model): help_text=_("Net amount in USD."), ) ft = models.ForeignKey( - Account, + Account, # should probably be Token on_delete=models.CASCADE, related_name="ft_donations", null=False, @@ -167,3 +173,92 @@ class Meta: name="unique_on_chain_id_with_pot", ), ] + + async def get_ft_token(self): + try: + token = await Token.objects.aget(id=self.ft) + except Token.DoesNotExist: + # TODO: fetch metadata from token contract (ft_metadata) and add decimals to token record. For now adding 12 which is most common + token = await Token.objects.acreate(id=self.ft, decimals=12) + return token + + ### Fetches USD prices for the Donation record and saves USD totals + async def fetch_usd_prices(self): + # get existing values for stats adjustments later + existing_total_amount_usd = self.total_amount_usd + existing_net_amount_usd = self.net_amount_usd + existing_protocol_fee_usd = self.protocol_fee_usd + existing_referrer_fee_usd = self.referrer_fee_usd + existing_chef_fee_usd = self.chef_fee_usd + # first, see if there is a TokenHistoricalPrice within 1 day (or HISTORICAL_PRICE_QUERY_HOURS) of self.donated_at + token = await self.get_ft_token() + time_window = timedelta(hours=settings.HISTORICAL_PRICE_QUERY_HOURS or 24) + token_prices = TokenHistoricalPrice.objects.filter( + token=token, + timestamp__gte=self.donated_at - time_window, + timestamp__lte=self.donated_at + time_window, + ) + existing_token_price = token_prices.first() + logger.info(f"existing token price: {existing_token_price}") + total_amount = token.format_price(self.total_amount) + net_amount = token.format_price(self.net_amount) + protocol_amount = token.format_price(self.protocol_fee) + referrer_amount = token.format_price(self.referrer_fee or "0") + chef_amount = token.format_price(self.chef_fee) + if existing_token_price: + try: + price_usd = existing_token_price.price_usd + self.total_amount_usd = total_amount * price_usd + self.net_amount_usd = net_amount * price_usd + self.protocol_fee_usd = protocol_amount * price_usd + self.referrer_fee_usd = referrer_amount * price_usd + self.chef_fee_usd = chef_amount * price_usd + self.save() + except Exception as e: + logger.error( + f"Failed to calculate and save USD prices using existing TokenHistoricalPrice: {e}" + ) + # TODO: update totals for relevant accounts + else: + # no existing price within acceptable time period; fetch from coingecko + try: + logger.info( + "No existing price within acceptable time period; fetching historical price..." 
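+                    # Example of the request built below (assuming format_date
+                    # yields Coingecko's dd-mm-yyyy form), with an illustrative
+                    # response shape; only "market_data.current_price.usd" is read:
+                    #   GET {COINGECKO_URL}/coins/near/history?date=07-05-2024&localization=false
+                    #   -> {"market_data": {"current_price": {"usd": 7.01, ...}, ...}}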
+ ) + endpoint = f"{settings.COINGECKO_URL}/coins/{self.ft.id}/history?date={format_date(self.donated_at)}&localization=false" + logger.info(f"coingecko endpoint: {endpoint}") + response = requests.get(endpoint) + logger.info(f"coingecko response: {response}") + if response.status_code == 429: + logger.warning("Coingecko rate limit exceeded") + price_data = response.json() + except Exception as e: + logger.warning(f"Failed to fetch coingecko price data: {e}") + logger.info(f"coingecko price data: {price_data}") + price_usd = ( + price_data.get("market_data", {}).get("current_price", {}).get("usd") + ) + logger.info(f"unit price: {price_usd}") + if price_usd: + try: + self.total_amount_usd = total_amount * price_usd + self.net_amount_usd = net_amount * price_usd + self.protocol_fee_usd = protocol_amount * price_usd + self.referrer_fee_usd = referrer_amount * price_usd + self.chef_fee_usd = chef_amount * price_usd + self.save() + except Exception as e: + logger.error( + f"Failed to calculate and save USD prices using fetched price: {e}" + ) + # TODO: update totals for relevant accounts + try: + await TokenHistoricalPrice.objects.acreate( + token=token, + price_usd=price_usd, + timestamp=self.donated_at, + ) + except Exception as e: + logger.warning( + f"Error creating TokenHistoricalPrice: {e} token: {token} price_usd: {price_usd}" + ) diff --git a/indexer_app/tasks.py b/indexer_app/tasks.py index 9b019b5..93eafb0 100644 --- a/indexer_app/tasks.py +++ b/indexer_app/tasks.py @@ -6,7 +6,7 @@ from celery.signals import task_revoked from django.conf import settings from django.db import transaction -from django.db.models import Count, Sum +from django.db.models import Count, Q, Sum from near_lake_framework import LakeConfig, streamer from accounts.models import Account @@ -65,16 +65,37 @@ def listen_to_near_events(): loop.close() +jobs_logger = logging.getLogger("jobs") + + +@shared_task +async def fetch_usd_prices(): + # fetch all Donation records that have a null total_amount_usd or net_amount_usd + # get closest TokenHistoricalPrice record for each donation, and check if it's within 1 day + # if not, fetch the price from coingecko API, create new TokenHistoricalPrice record and update the Donation record + + donations = Donation.objects.filter( + Q(total_amount_usd__isnull=True) | Q(net_amount_usd__isnull=True) + ) + jobs_logger.info(f"Fetching USD prices for {donations.count()} donations...") + for donation in donations: + try: + await donation.fetch_usd_prices() + except Exception as e: + jobs_logger.error( + f"Failed to fetch USD prices for donation {donation.id}: {e}" + ) + + @shared_task def update_account_statistics(): - logger = logging.getLogger("jobs") accounts = Account.objects.all() accounts_count = accounts.count() - logger.info(f"Updating statistics for {accounts_count} accounts...") + jobs_logger.info(f"Updating statistics for {accounts_count} accounts...") for account in accounts: try: - # logger.info(f"Updating statistics for account {account.id}...") + # jobs_logger.info(f"Updating statistics for account {account.id}...") # donors count account.donors_count = Donation.objects.filter(recipient=account).aggregate( Count("donor", distinct=True) @@ -113,10 +134,12 @@ def update_account_statistics(): "total_matching_pool_allocations_usd", ] ) - # logger.info(f"Account {account.id} statistics updated.") + # jobs_logger.info(f"Account {account.id} statistics updated.") except Exception as e: - logger.error(f"Failed to update statistics for account {account.id}: {e}") - 
logger.info(f"Account stats for {accounts.count()} accounts updated.") + jobs_logger.error( + f"Failed to update statistics for account {account.id}: {e}" + ) + jobs_logger.info(f"Account stats for {accounts.count()} accounts updated.") @task_revoked.connect diff --git a/indexer_app/utils.py b/indexer_app/utils.py index cc78781..e6560d4 100644 --- a/indexer_app/utils.py +++ b/indexer_app/utils.py @@ -27,7 +27,7 @@ from .logging import logger -GECKO_URL = "https://api.coingecko.com/api/v3" +# GECKO_URL = "https://api.coingecko.com/api/v3" # TODO: move to settings async def handle_new_pot( @@ -589,53 +589,55 @@ async def handle_new_donations( # TODO: fetch metadata from token contract (ft_metadata) and add decimals to token record. For now adding 12 which is most common token = await Token.objects.acreate(id=token_acct, decimals=12) - # Fetch historical token data - # late_p = await token.get_most_recent_price() - try: - logger.info("fetching historical price...") - logger.info(f"donated at: {donated_at}") + # # Fetch historical token data + # # late_p = await token.get_most_recent_price() + # try: + # logger.info("fetching historical price...") + # logger.info(f"donated at: {donated_at}") endpoint = f"{GECKO_URL}/coins/{donation_data.get('ft_id', 'near')}/history?date={format_date(donated_at)}&localization=false" - logger.info(f"endpoint: {endpoint}") - response = requests.get(endpoint) - logger.info(f"response: {response}") - if response.status_code == 429: - logger.warning("Coingecko rate limit exceeded") - price_data = response.json() - except Exception as e: - logger.warning(f"Failed to fetch price data: {e}") - logger.info(f"price data: {price_data}") - unit_price = price_data.get("market_data", {}).get("current_price", {}).get("usd") - logger.info(f"unit price: {unit_price}") - if unit_price: - try: - await TokenHistoricalPrice.objects.acreate( # need to change token model to use token as id - token=token, - price_usd=unit_price, - timestamp=donated_at, - ) - except Exception as e: - logger.warning( - f"Error creating TokenHistoricalPrice: {e} token: {token} unit_price: {unit_price}" - ) - # historical_price = await token.get_most_recent_price() # to use model methods, we might have to use asgiref sync_to_async - historical = await TokenHistoricalPrice.objects.aget(token=token) - unit_price = historical.price_usd + # logger.info(f"endpoint: {endpoint}") + # response = requests.get(endpoint) + # logger.info(f"response: {response}") + # if response.status_code == 429: + # logger.warning("Coingecko rate limit exceeded") + # price_data = response.json() + # except Exception as e: + # logger.warning(f"Failed to fetch price data: {e}") + # logger.info(f"price data: {price_data}") + # unit_price = price_data.get("market_data", {}).get("current_price", {}).get("usd") + # logger.info(f"unit price: {unit_price}") + # if unit_price: + # try: + # await TokenHistoricalPrice.objects.acreate( # need to change token model to use token as id + # token=token, + # price_usd=unit_price, + # timestamp=donated_at, + # ) + # except Exception as e: + # logger.warning( + # f"Error creating TokenHistoricalPrice: {e} token: {token} unit_price: {unit_price}" + # ) + # # historical_price = await token.get_most_recent_price() # to use model methods, we might have to use asgiref sync_to_async + # historical = await TokenHistoricalPrice.objects.aget(token=token) + # unit_price = historical.price_usd + + # total_amount = donation_data["total_amount"] + # net_amount = net_amount - int(donation_data.get("referrer_fee") 
or 0) + + # # Calculate and format amounts + # total_near_amount = format_to_near(total_amount) + # net_near_amount = format_to_near(net_amount) + # total_amount_usd = None if not unit_price else unit_price * total_near_amount + # net_amount_usd = None if not unit_price else unit_price * net_near_amount total_amount = donation_data["total_amount"] - net_amount = net_amount - int(donation_data.get("referrer_fee") or 0) - - # Calculate and format amounts - total_near_amount = format_to_near(total_amount) - net_near_amount = format_to_near(net_amount) - total_amount_usd = None if not unit_price else unit_price * total_near_amount - net_amount_usd = None if not unit_price else unit_price * net_near_amount logger.info(f"inserting donations... by {actionName}") default_data = { "donor": donor, "total_amount": total_amount, - "total_amount_usd": total_amount_usd, - "net_amount_usd": net_amount_usd, + "total_amount_usd": None, # USD amounts will be added later (could be in pre-save hook) + "net_amount_usd": None, "net_amount": net_amount, "ft": token_acct, "message": donation_data.get("message"), @@ -659,11 +661,14 @@ async def handle_new_donations( ) logger.info(f"Created donation? {donation_created}") - # convert total_amount_usd and net_amount_usd from None - if total_amount_usd is None: - total_amount_usd = 0.0 - if net_amount_usd is None: - net_amount_usd = 0.0 + # fetch USD prices + await donation.fetch_usd_prices() # might not need to await this? + + # # convert total_amount_usd and net_amount_usd from None + # if total_amount_usd is None: + # total_amount_usd = 0.0 + # if net_amount_usd is None: + # net_amount_usd = 0.0 # Insert or update activity record activity_type = ( @@ -689,63 +694,65 @@ async def handle_new_donations( else: logger.info(f"Activity updated: {activity}") - if donation_created: # only do stats updates if donation object was created - - if actionName != "direct": - - potUpdate = { - "total_public_donations": str( - int(pot.total_public_donations or 0) + int(total_amount) - ), - "total_public_donations_usd": int(pot.total_public_donations_usd or 0.0) - + total_amount_usd, - } - if donation_data.get("matching_pool"): - potUpdate["total_matching_pool"] = str( - int(pot.total_matching_pool or 0) + int(total_amount) - ) - potUpdate["total_matching_pool"] = ( - pot.total_matching_pool_usd or 0.0 - ) + total_amount_usd - potUpdate["matching_pool_donations_count"] = ( - pot.matching_pool_donations_count or 0 - ) + 1 - - if recipient: - await Account.objects.filter(id=recipient.id).aupdate( - **{ - "total_matching_pool_allocations_usd": recipient.total_matching_pool_allocations_usd - + total_amount_usd - } - ) - - # accountUpdate = {} - else: - potUpdate["public_donations_count"] = ( - pot.public_donations_count or 0 - ) + 1 - - await Pot.objects.filter(id=receiverId).aupdate(**potUpdate) - - # donation_recipient = donation_data.get('project_id', donation_data['recipient_id']) - logger.info( - f"update total donated for {donor.id}, {donor.total_donations_out_usd + decimal.Decimal(total_amount_usd)}" - ) - await Account.objects.filter(id=donor.id).aupdate( - **{ - "total_donations_out_usd": donor.total_donations_out_usd - + decimal.Decimal(total_amount_usd) - } - ) - if recipient: - acct = await Account.objects.aget(id=recipient.id) - logger.info(f"selected {acct} to perform donor count update") - acctUpdate = { - "donors_count": acct.donors_count + 1, - "total_donations_in_usd": acct.total_donations_in_usd - + decimal.Decimal(net_amount_usd), - } - await 
Account.objects.filter(id=recipient.id).aupdate(**acctUpdate) + ### COMMENTING OUT FOR NOW SINCE WE HAVE PERIODIC JOB RUNNING TO UPDATE ACCOUNT STATS (NB: DOESN'T CURRENTLY COVER POT STATS) + ### CAN ALWAYS ADD BACK IF DESIRED + # if donation_created: # only do stats updates if donation object was created + + # if actionName != "direct": + + # potUpdate = { + # "total_public_donations": str( + # int(pot.total_public_donations or 0) + int(total_amount) + # ), + # "total_public_donations_usd": int(pot.total_public_donations_usd or 0.0) + # + total_amount_usd, + # } + # if donation_data.get("matching_pool"): + # potUpdate["total_matching_pool"] = str( + # int(pot.total_matching_pool or 0) + int(total_amount) + # ) + # potUpdate["total_matching_pool"] = ( + # pot.total_matching_pool_usd or 0.0 + # ) + total_amount_usd + # potUpdate["matching_pool_donations_count"] = ( + # pot.matching_pool_donations_count or 0 + # ) + 1 + + # if recipient: + # await Account.objects.filter(id=recipient.id).aupdate( + # **{ + # "total_matching_pool_allocations_usd": recipient.total_matching_pool_allocations_usd + # + total_amount_usd + # } + # ) + + # # accountUpdate = {} + # else: + # potUpdate["public_donations_count"] = ( + # pot.public_donations_count or 0 + # ) + 1 + + # await Pot.objects.filter(id=receiverId).aupdate(**potUpdate) + + # # donation_recipient = donation_data.get('project_id', donation_data['recipient_id']) + # logger.info( + # f"update total donated for {donor.id}, {donor.total_donations_out_usd + decimal.Decimal(total_amount_usd)}" + # ) + # await Account.objects.filter(id=donor.id).aupdate( + # **{ + # "total_donations_out_usd": donor.total_donations_out_usd + # + decimal.Decimal(total_amount_usd) + # } + # ) + # if recipient: + # acct = await Account.objects.aget(id=recipient.id) + # logger.info(f"selected {acct} to perform donor count update") + # acctUpdate = { + # "donors_count": acct.donors_count + 1, + # "total_donations_in_usd": acct.total_donations_in_usd + # + decimal.Decimal(net_amount_usd), + # } + # await Account.objects.filter(id=recipient.id).aupdate(**acctUpdate) async def cache_block_height( diff --git a/tokens/models.py b/tokens/models.py index aa4a309..5db0b46 100644 --- a/tokens/models.py +++ b/tokens/models.py @@ -21,6 +21,10 @@ class Token(models.Model): def get_most_recent_price(self): return self.historical_prices.order_by("-timestamp").first() + def format_price(self, amount_str: str): + formatted_amount = int(amount_str) / (10**self.decimals) + return formatted_amount + class TokenHistoricalPrice(models.Model): token = models.ForeignKey( From de076444efa671bf6458c83354aa9f652583895e Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Tue, 7 May 2024 14:13:29 -0400 Subject: [PATCH 068/127] add fetch_usd_prices to celery beat schedule --- base/celery.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/base/celery.py b/base/celery.py index 801159f..1aa4d8f 100644 --- a/base/celery.py +++ b/base/celery.py @@ -30,10 +30,14 @@ "schedule": crontab(minute="*/5"), # Executes every 5 minutes "options": {"queue": "beat_tasks"}, }, + "fetch_usd_prices_every_5_minutes": { + "task": "indexer_app.tasks.fetch_usd_prices", + "schedule": crontab(minute="*/5"), # Executes every 5 minutes + "options": {"queue": "beat_tasks"}, + }, } app.conf.task_routes = { "indexer_app.tasks.update_account_statistics": {"queue": "beat_tasks"}, + "indexer_app.tasks.fetch_usd_prices": {"queue": "beat_tasks"}, } - -# TODO: (pick up 
here): rename & restart services, using correct workers From 96c4afe5481eff1172752c4ea1a1cad76436295f Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Tue, 7 May 2024 14:20:46 -0400 Subject: [PATCH 069/127] add celery-beat stop/restart to after_install --- scripts/after_install.sh | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/scripts/after_install.sh b/scripts/after_install.sh index c847c6c..4ffdb25 100755 --- a/scripts/after_install.sh +++ b/scripts/after_install.sh @@ -50,17 +50,17 @@ PENDING_MIGRATIONS=$(python manage.py showmigrations | grep "\[ \]" | wc -l) # if [ "$PENDING_MIGRATIONS" -gt 0 ]; then echo "Migrations found; stopping services..." >> "$LOG_FILE" - sudo systemctl stop gunicorn celery-indexer-worker celery-beat-worker + sudo systemctl stop gunicorn celery-indexer-worker celery-beat-worker celery-beat echo 'Applying migrations...' >> "$LOG_FILE" python manage.py migrate >> "$LOG_FILE" 2>&1 echo 'Starting services...' >> "$LOG_FILE" - sudo systemctl start gunicorn celery-indexer-worker celery-beat-worker + sudo systemctl start gunicorn celery-indexer-worker celery-beat-worker celery-beat else echo 'No migrations found. Running collectstatic and restarting services...' >> "$LOG_FILE" python manage.py collectstatic --noinput >> "$LOG_FILE" 2>&1 - sudo systemctl restart gunicorn celery-indexer-worker celery-beat-worker + sudo systemctl restart gunicorn celery-indexer-worker celery-beat-worker celery-beat fi echo "$(date '+%Y-%m-%d %H:%M:%S') - after_install.sh completed" >> "$LOG_FILE" From d1180aa20ff84b3ac8652a4a0908f04550404ead Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Tue, 7 May 2024 14:37:02 -0400 Subject: [PATCH 070/127] make fetch_usd_prices task synchronous --- donations/models.py | 17 +++++++++++------ indexer_app/tasks.py | 23 ++++++++++++++++------- indexer_app/utils.py | 2 +- 3 files changed, 28 insertions(+), 14 deletions(-) diff --git a/donations/models.py b/donations/models.py index 7f39a4f..d6af2ce 100644 --- a/donations/models.py +++ b/donations/models.py @@ -1,6 +1,7 @@ from datetime import timedelta import requests +from asgiref.sync import sync_to_async from django.conf import settings from django.db import models from django.utils.translation import gettext_lazy as _ @@ -174,16 +175,20 @@ class Meta: ), ] - async def get_ft_token(self): + def get_ft_token(self): try: - token = await Token.objects.aget(id=self.ft) + token = Token.objects.aget(id=self.ft) except Token.DoesNotExist: # TODO: fetch metadata from token contract (ft_metadata) and add decimals to token record. 
For now adding 12 which is most common - token = await Token.objects.acreate(id=self.ft, decimals=12) + token = Token.objects.acreate(id=self.ft, decimals=12) return token + async def fetch_usd_prices_async(self): + fetch_prices = sync_to_async(self.fetch_usd_prices) + await fetch_prices() + ### Fetches USD prices for the Donation record and saves USD totals - async def fetch_usd_prices(self): + def fetch_usd_prices(self): # get existing values for stats adjustments later existing_total_amount_usd = self.total_amount_usd existing_net_amount_usd = self.net_amount_usd @@ -191,7 +196,7 @@ async def fetch_usd_prices(self): existing_referrer_fee_usd = self.referrer_fee_usd existing_chef_fee_usd = self.chef_fee_usd # first, see if there is a TokenHistoricalPrice within 1 day (or HISTORICAL_PRICE_QUERY_HOURS) of self.donated_at - token = await self.get_ft_token() + token = self.get_ft_token() time_window = timedelta(hours=settings.HISTORICAL_PRICE_QUERY_HOURS or 24) token_prices = TokenHistoricalPrice.objects.filter( token=token, @@ -253,7 +258,7 @@ async def fetch_usd_prices(self): ) # TODO: update totals for relevant accounts try: - await TokenHistoricalPrice.objects.acreate( + TokenHistoricalPrice.objects.create( token=token, price_usd=price_usd, timestamp=self.donated_at, diff --git a/indexer_app/tasks.py b/indexer_app/tasks.py index 93eafb0..c0f6891 100644 --- a/indexer_app/tasks.py +++ b/indexer_app/tasks.py @@ -68,23 +68,32 @@ def listen_to_near_events(): jobs_logger = logging.getLogger("jobs") -@shared_task -async def fetch_usd_prices(): - # fetch all Donation records that have a null total_amount_usd or net_amount_usd - # get closest TokenHistoricalPrice record for each donation, and check if it's within 1 day - # if not, fetch the price from coingecko API, create new TokenHistoricalPrice record and update the Donation record +# @shared_task +# def fetch_usd_prices(): +# donations = Donation.objects.filter( +# Q(total_amount_usd__isnull=True) | Q(net_amount_usd__isnull=True) +# ) +# jobs_logger.info(f"Fetching USD prices for {donations.count()} donations...") +# loop = asyncio.get_event_loop() +# tasks = [loop.create_task(donation.fetch_usd_prices()) for donation in donations] +# loop.run_until_complete(asyncio.gather(*tasks)) + +@shared_task +def fetch_usd_prices(): donations = Donation.objects.filter( Q(total_amount_usd__isnull=True) | Q(net_amount_usd__isnull=True) ) - jobs_logger.info(f"Fetching USD prices for {donations.count()} donations...") + donations_count = donations.count() + jobs_logger.info(f"Fetching USD prices for {donations_count} donations...") for donation in donations: try: - await donation.fetch_usd_prices() + donation.fetch_usd_prices() except Exception as e: jobs_logger.error( f"Failed to fetch USD prices for donation {donation.id}: {e}" ) + jobs_logger.info(f"USD prices fetched for {donations_count} donations.") @shared_task diff --git a/indexer_app/utils.py b/indexer_app/utils.py index e6560d4..e7daeca 100644 --- a/indexer_app/utils.py +++ b/indexer_app/utils.py @@ -662,7 +662,7 @@ async def handle_new_donations( logger.info(f"Created donation? {donation_created}") # fetch USD prices - await donation.fetch_usd_prices() # might not need to await this? + await donation.fetch_usd_prices_async() # might not need to await this? 
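+    # fetch_usd_prices_async wraps the synchronous fetch_usd_prices with asgiref's
+    # sync_to_async (see the donations/models.py hunk above), so the call here is
+    # roughly equivalent to:
+    #
+    #   from asgiref.sync import sync_to_async
+    #   await sync_to_async(donation.fetch_usd_prices)()
+    #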
# # convert total_amount_usd and net_amount_usd from None # if total_amount_usd is None: From 92c1915c34886abaa0cd43156ada012793e9e692 Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Tue, 7 May 2024 14:43:44 -0400 Subject: [PATCH 071/127] use sync db methods in Donation model methods --- donations/models.py | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/donations/models.py b/donations/models.py index d6af2ce..c5d1df6 100644 --- a/donations/models.py +++ b/donations/models.py @@ -176,11 +176,13 @@ class Meta: ] def get_ft_token(self): - try: - token = Token.objects.aget(id=self.ft) - except Token.DoesNotExist: - # TODO: fetch metadata from token contract (ft_metadata) and add decimals to token record. For now adding 12 which is most common - token = Token.objects.acreate(id=self.ft, decimals=12) + token, created = Token.objects.get_or_create( + id=self.ft, + defaults={"decimals": 12}, # Default values for new Token creation + ) + if created: + # TODO: fetch token metadata and add correct decimals, possibly other metadata + pass return token async def fetch_usd_prices_async(self): From d0f384d567a7cca3ed86881a7a587ce578444ade Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Tue, 7 May 2024 14:52:00 -0400 Subject: [PATCH 072/127] allow None chef_fee on Donation --- .../0006_alter_donation_chef_fee.py | 20 ++++++++++++++ donations/models.py | 2 +- indexer_app/utils.py | 26 +++++++++---------- 3 files changed, 34 insertions(+), 14 deletions(-) create mode 100644 donations/migrations/0006_alter_donation_chef_fee.py diff --git a/donations/migrations/0006_alter_donation_chef_fee.py b/donations/migrations/0006_alter_donation_chef_fee.py new file mode 100644 index 0000000..c22db14 --- /dev/null +++ b/donations/migrations/0006_alter_donation_chef_fee.py @@ -0,0 +1,20 @@ +# Generated by Django 5.0.4 on 2024-05-07 18:50 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("donations", "0005_remove_donation_unique_pot_on_chain_id_and_more"), + ] + + operations = [ + migrations.AlterField( + model_name="donation", + name="chef_fee", + field=models.CharField( + help_text="Chef fee.", max_length=64, null=True, verbose_name="chef fee" + ), + ), + ] diff --git a/donations/models.py b/donations/models.py index c5d1df6..85d5f7b 100644 --- a/donations/models.py +++ b/donations/models.py @@ -144,7 +144,7 @@ class Donation(models.Model): chef_fee = models.CharField( _("chef fee"), max_length=64, - null=False, + null=True, help_text=_("Chef fee."), ) chef_fee_usd = models.DecimalField( diff --git a/indexer_app/utils.py b/indexer_app/utils.py index e7daeca..dc4b986 100644 --- a/indexer_app/utils.py +++ b/indexer_app/utils.py @@ -582,19 +582,19 @@ async def handle_new_donations( id=(donation_data.get("ft_id") or "near") ) - # Upsert token - try: - token = await Token.objects.aget(id=token_acct) - except Token.DoesNotExist: - # TODO: fetch metadata from token contract (ft_metadata) and add decimals to token record. 
For now adding 12 which is most common - token = await Token.objects.acreate(id=token_acct, decimals=12) - - # # Fetch historical token data - # # late_p = await token.get_most_recent_price() - # try: - # logger.info("fetching historical price...") - # logger.info(f"donated at: {donated_at}") - endpoint = f"{GECKO_URL}/coins/{donation_data.get('ft_id', 'near')}/history?date={format_date(donated_at)}&localization=false" + # # Upsert token + # try: + # token = await Token.objects.aget(id=token_acct) + # except Token.DoesNotExist: + # # TODO: fetch metadata from token contract (ft_metadata) and add decimals to token record. For now adding 12 which is most common + # token = await Token.objects.acreate(id=token_acct, decimals=12) + + # # Fetch historical token data + # # late_p = await token.get_most_recent_price() + # try: + # logger.info("fetching historical price...") + # logger.info(f"donated at: {donated_at}") + # endpoint = f"{GECKO_URL}/coins/{donation_data.get('ft_id', 'near')}/history?date={format_date(donated_at)}&localization=false" # logger.info(f"endpoint: {endpoint}") # response = requests.get(endpoint) # logger.info(f"response: {response}") From 1e5209683f4e55d4fc49335ec2d5d61002811ec5 Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Tue, 7 May 2024 14:59:45 -0400 Subject: [PATCH 073/127] add migration to set chef_fee to None for existing donations --- .../migrations/0007_auto_20240507_1857.py | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) create mode 100644 donations/migrations/0007_auto_20240507_1857.py diff --git a/donations/migrations/0007_auto_20240507_1857.py b/donations/migrations/0007_auto_20240507_1857.py new file mode 100644 index 0000000..1052a55 --- /dev/null +++ b/donations/migrations/0007_auto_20240507_1857.py @@ -0,0 +1,19 @@ +# Generated by Django 5.0.4 on 2024-05-07 18:57 + +from django.db import migrations + +from ..models import Donation + + +class Migration(migrations.Migration): + + def set_chef_fee_none(apps, schema_editor): + Donation.objects.update(chef_fee=None) + + dependencies = [ + ("donations", "0006_alter_donation_chef_fee"), + ] + + operations = [ + migrations.RunPython(set_chef_fee_none), + ] From 58d560ce02a082bcccb4f70ef67397a965849cad Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Tue, 7 May 2024 15:06:10 -0400 Subject: [PATCH 074/127] fix int None issue in fetch_usd_prices --- donations/models.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/donations/models.py b/donations/models.py index 85d5f7b..d2380e9 100644 --- a/donations/models.py +++ b/donations/models.py @@ -211,7 +211,7 @@ def fetch_usd_prices(self): net_amount = token.format_price(self.net_amount) protocol_amount = token.format_price(self.protocol_fee) referrer_amount = token.format_price(self.referrer_fee or "0") - chef_amount = token.format_price(self.chef_fee) + chef_amount = token.format_price(self.chef_fee or "0") if existing_token_price: try: price_usd = existing_token_price.price_usd From 12dc03b7e162f065739cba2c8770bf255f5f1c9f Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Tue, 7 May 2024 15:13:44 -0400 Subject: [PATCH 075/127] return decimal from Token.format_price --- tokens/models.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/tokens/models.py b/tokens/models.py index 5db0b46..1b4d071 100644 --- a/tokens/models.py +++ b/tokens/models.py @@ -1,3 +1,5 @@ +from decimal 
import Decimal + from django.db import models from django.utils import timezone from django.utils.translation import gettext_lazy as _ @@ -22,7 +24,8 @@ def get_most_recent_price(self): return self.historical_prices.order_by("-timestamp").first() def format_price(self, amount_str: str): - formatted_amount = int(amount_str) / (10**self.decimals) + # Convert the string amount to a Decimal, then adjust by the token's decimal places + formatted_amount = Decimal(amount_str) / (Decimal("10") ** self.decimals) return formatted_amount From 3ca9a75f43eebcca10c618f936d6b2b0f6e8d02e Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Tue, 7 May 2024 15:19:52 -0400 Subject: [PATCH 076/127] remove redundant log from fetch_usd_prices --- donations/models.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/donations/models.py b/donations/models.py index d2380e9..5d7e0c4 100644 --- a/donations/models.py +++ b/donations/models.py @@ -206,7 +206,6 @@ def fetch_usd_prices(self): timestamp__lte=self.donated_at + time_window, ) existing_token_price = token_prices.first() - logger.info(f"existing token price: {existing_token_price}") total_amount = token.format_price(self.total_amount) net_amount = token.format_price(self.net_amount) protocol_amount = token.format_price(self.protocol_fee) @@ -221,6 +220,9 @@ def fetch_usd_prices(self): self.referrer_fee_usd = referrer_amount * price_usd self.chef_fee_usd = chef_amount * price_usd self.save() + logger.info( + "USD prices calculated and saved using existing TokenHistoricalPrice" + ) except Exception as e: logger.error( f"Failed to calculate and save USD prices using existing TokenHistoricalPrice: {e}" From 2d0ddaf8cc7b37e35d0a57a982106d252ebd9f8f Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Wed, 8 May 2024 08:38:11 -0400 Subject: [PATCH 077/127] fix decimal * flot issue & restart indexer from block 105_854_538 --- donations/models.py | 3 +++ indexer_app/tasks.py | 4 ++-- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/donations/models.py b/donations/models.py index 5d7e0c4..c4f1727 100644 --- a/donations/models.py +++ b/donations/models.py @@ -1,4 +1,5 @@ from datetime import timedelta +from decimal import Decimal import requests from asgiref.sync import sync_to_async @@ -250,6 +251,8 @@ def fetch_usd_prices(self): logger.info(f"unit price: {price_usd}") if price_usd: try: + # convert price_usd to decimal + price_usd = Decimal(price_usd) self.total_amount_usd = total_amount * price_usd self.net_amount_usd = net_amount * price_usd self.protocol_fee_usd = protocol_amount * price_usd diff --git a/indexer_app/tasks.py b/indexer_app/tasks.py index c0f6891..fb5b4b5 100644 --- a/indexer_app/tasks.py +++ b/indexer_app/tasks.py @@ -57,8 +57,8 @@ def listen_to_near_events(): try: # Update below with desired network & block height - start_block = get_block_height("current_block_height") - # start_block = 105_534_694 # manually setting for reindexing TODO: remove this + # start_block = get_block_height("current_block_height") + start_block = 105_854_538 # manually setting for reindexing TODO: remove this logger.info(f"what's the start block, pray tell? 
{start_block-1}") loop.run_until_complete(indexer("mainnet", start_block - 1, None)) finally: From 27ac54caf67894c9da048df480be6ce03f5e2aab Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Wed, 8 May 2024 08:42:35 -0400 Subject: [PATCH 078/127] update usd prices job to check for no protocol_fee_usd value --- donations/models.py | 21 +++++++++++++++------ indexer_app/tasks.py | 6 +++++- 2 files changed, 20 insertions(+), 7 deletions(-) diff --git a/donations/models.py b/donations/models.py index c4f1727..bc671cc 100644 --- a/donations/models.py +++ b/donations/models.py @@ -210,16 +210,21 @@ def fetch_usd_prices(self): total_amount = token.format_price(self.total_amount) net_amount = token.format_price(self.net_amount) protocol_amount = token.format_price(self.protocol_fee) - referrer_amount = token.format_price(self.referrer_fee or "0") - chef_amount = token.format_price(self.chef_fee or "0") + referrer_amount = ( + None if not self.referrer_fee else token.format_price(self.referrer_fee) + ) + chef_amount = None if not self.chef_fee else token.format_price(self.chef_fee) + # chef_amount = token.format_price(self.chef_fee or "0") if existing_token_price: try: price_usd = existing_token_price.price_usd self.total_amount_usd = total_amount * price_usd self.net_amount_usd = net_amount * price_usd self.protocol_fee_usd = protocol_amount * price_usd - self.referrer_fee_usd = referrer_amount * price_usd - self.chef_fee_usd = chef_amount * price_usd + self.referrer_fee_usd = ( + None if not referrer_amount else referrer_amount * price_usd + ) + self.chef_fee_usd = None if not chef_amount else chef_amount * price_usd self.save() logger.info( "USD prices calculated and saved using existing TokenHistoricalPrice" @@ -256,8 +261,12 @@ def fetch_usd_prices(self): self.total_amount_usd = total_amount * price_usd self.net_amount_usd = net_amount * price_usd self.protocol_fee_usd = protocol_amount * price_usd - self.referrer_fee_usd = referrer_amount * price_usd - self.chef_fee_usd = chef_amount * price_usd + self.referrer_fee_usd = ( + None if not referrer_amount else referrer_amount * price_usd + ) + self.chef_fee_usd = ( + None if not chef_amount else chef_amount * price_usd + ) self.save() except Exception as e: logger.error( diff --git a/indexer_app/tasks.py b/indexer_app/tasks.py index fb5b4b5..0b88cf0 100644 --- a/indexer_app/tasks.py +++ b/indexer_app/tasks.py @@ -82,7 +82,11 @@ def listen_to_near_events(): @shared_task def fetch_usd_prices(): donations = Donation.objects.filter( - Q(total_amount_usd__isnull=True) | Q(net_amount_usd__isnull=True) + Q(total_amount_usd__isnull=True) + | Q(net_amount_usd__isnull=True) + | Q(protocol_fee_usd__isnull=True) + | Q(referrer_fee_usd__isnull=True) + | Q(chef_fee_usd__isnull=True) ) donations_count = donations.count() jobs_logger.info(f"Fetching USD prices for {donations_count} donations...") From d29667430063ce1c47b04fd99871fb45479ed882 Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Wed, 8 May 2024 08:43:25 -0400 Subject: [PATCH 079/127] remove manual start block setting --- indexer_app/tasks.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/indexer_app/tasks.py b/indexer_app/tasks.py index 0b88cf0..21eab1a 100644 --- a/indexer_app/tasks.py +++ b/indexer_app/tasks.py @@ -57,8 +57,8 @@ def listen_to_near_events(): try: # Update below with desired network & block height - # start_block = get_block_height("current_block_height") - 
start_block = 105_854_538 # manually setting for reindexing TODO: remove this + start_block = get_block_height("current_block_height") + # start_block = 105_854_538 # manually setting for reindexing TODO: remove this logger.info(f"what's the start block, pray tell? {start_block-1}") loop.run_until_complete(indexer("mainnet", start_block - 1, None)) finally: From 4f1ce84e6c1d86e8ed91d687e32c18aa91b2c4cd Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Wed, 8 May 2024 08:46:01 -0400 Subject: [PATCH 080/127] fix donations query in fetch_usd_prices --- indexer_app/tasks.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/indexer_app/tasks.py b/indexer_app/tasks.py index 21eab1a..8272711 100644 --- a/indexer_app/tasks.py +++ b/indexer_app/tasks.py @@ -85,8 +85,8 @@ def fetch_usd_prices(): Q(total_amount_usd__isnull=True) | Q(net_amount_usd__isnull=True) | Q(protocol_fee_usd__isnull=True) - | Q(referrer_fee_usd__isnull=True) - | Q(chef_fee_usd__isnull=True) + | Q(referrer_fee__isnull=False, referrer_fee_usd__isnull=True) + | Q(chef_fee__isnull=False, chef_fee_usd__isnull=True) ) donations_count = donations.count() jobs_logger.info(f"Fetching USD prices for {donations_count} donations...") From e9eb35a53be9c0019bcc5d310855cd7ec3317edb Mon Sep 17 00:00:00 2001 From: Prometheus Date: Thu, 9 May 2024 13:56:30 +0100 Subject: [PATCH 081/127] Backfill compatible (#23) * feat: allow backfill by update_create,index admin challenge response * fix verbose name typo * remove unused imports --------- Co-authored-by: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> --- indexer_app/handler.py | 10 +- indexer_app/utils.py | 856 ++++++++++-------- .../0002_alter_listupvote_options_and_more.py | 22 + lists/models.py | 5 + pots/admin.py | 13 +- ...2_alter_potapplication_options_and_more.py | 73 ++ pots/models.py | 39 +- 7 files changed, 639 insertions(+), 379 deletions(-) create mode 100644 lists/migrations/0002_alter_listupvote_options_and_more.py create mode 100644 pots/migrations/0002_alter_potapplication_options_and_more.py diff --git a/indexer_app/handler.py b/indexer_app/handler.py index a3858fe..5c251cf 100644 --- a/indexer_app/handler.py +++ b/indexer_app/handler.py @@ -21,6 +21,7 @@ handle_new_pot, handle_new_pot_factory, handle_payout_challenge, + handle_payout_challenge_response, handle_pot_application, handle_pot_application_status_change, handle_set_payouts, @@ -270,7 +271,14 @@ async def handle_streamer_message(streamer_message: near_primitives.StreamerMess case "challenge_payouts": logger.info(f"challenge payout: {args_dict}") await handle_payout_challenge( - args_dict, receiver_id, signer_id, receipt.receipt_id + args_dict, receiver_id, signer_id, receipt.receipt_id, created_at + ) + break + + case "admin_update_payouts_challenge": + logger.info(f"challenge payout: {args_dict}") + await handle_payout_challenge_response( + args_dict, receiver_id, signer_id, receipt.receipt_id, created_at ) break diff --git a/indexer_app/utils.py b/indexer_app/utils.py index dc4b986..e3fd803 100644 --- a/indexer_app/utils.py +++ b/indexer_app/utils.py @@ -1,7 +1,7 @@ import base64 import decimal import json -from datetime import date, datetime +from datetime import datetime import requests from django.conf import settings @@ -11,7 +11,6 @@ from accounts.models import Account from activities.models import Activity -from base.utils import format_date, format_to_near from donations.models import Donation from indexer_app.models import 
BlockHeight from lists.models import List, ListRegistration, ListUpvote @@ -22,8 +21,8 @@ PotFactory, PotPayout, PotPayoutChallenge, + PotPayoutChallengeAdminResponse, ) -from tokens.models import Token, TokenHistoricalPrice from .logging import logger @@ -38,152 +37,177 @@ async def handle_new_pot( receiptId: str, created_at: datetime, ): - logger.info("new pot deployment process... upsert accounts,") - - # Upsert accounts - owner, _ = await Account.objects.aget_or_create(id=data["owner"]) - signer, _ = await Account.objects.aget_or_create(id=signerId) - receiver, _ = await Account.objects.aget_or_create(id=receiverId) - - logger.info("upsert chef") - if data.get("chef"): - chef, _ = await Account.objects.aget_or_create(id=data["chef"]) - - # Create Pot object - logger.info("create pot....") - potObject = await Pot.objects.acreate( - id=receiver, - pot_factory_id=predecessorId, - deployer=signer, - deployed_at=created_at, - source_metadata=data["source_metadata"], - owner_id=data["owner"], - chef_id=data.get("chef"), - name=data["pot_name"], - description=data["pot_description"], - max_approved_applicants=data["max_projects"], - base_currency="near", - application_start=datetime.fromtimestamp(data["application_start_ms"] / 1000), - application_end=datetime.fromtimestamp(data["application_end_ms"] / 1000), - matching_round_start=datetime.fromtimestamp( - data["public_round_start_ms"] / 1000 - ), - matching_round_end=datetime.fromtimestamp(data["public_round_end_ms"] / 1000), - registry_provider=data["registry_provider"], - min_matching_pool_donation_amount=data["min_matching_pool_donation_amount"], - sybil_wrapper_provider=data["sybil_wrapper_provider"], - custom_sybil_checks=data.get("custom_sybil_checks"), - custom_min_threshold_score=data.get("custom_min_threshold_score"), - referral_fee_matching_pool_basis_points=data[ - "referral_fee_matching_pool_basis_points" - ], - referral_fee_public_round_basis_points=data[ - "referral_fee_public_round_basis_points" - ], - chef_fee_basis_points=data["chef_fee_basis_points"], - total_matching_pool="0", - matching_pool_balance="0", - matching_pool_donations_count=0, - total_public_donations="0", - public_donations_count=0, - cooldown_period_ms=None, - all_paid_out=False, - protocol_config_provider=data["protocol_config_provider"], - ) + try: - # Add admins to the Pot - if data.get("admins"): - for admin_id in data["admins"]: - admin, _ = await Account.objects.aget_or_create(id=admin_id) - potObject.admins.aadd(admin) - - # Create activity object - await Activity.objects.acreate( - signer_id=signerId, - receiver_id=receiverId, - timestamp=created_at, - type="Deploy_Pot", - action_result=data, - tx_hash=receiptId, - ) + logger.info("new pot deployment process... 
upsert accounts,")
+
+        # Upsert accounts
+        owner, _ = await Account.objects.aget_or_create(id=data["owner"])
+        signer, _ = await Account.objects.aget_or_create(id=signerId)
+        receiver, _ = await Account.objects.aget_or_create(id=receiverId)
+
+        logger.info("upsert chef")
+        if data.get("chef"):
+            chef, _ = await Account.objects.aget_or_create(id=data["chef"])
+
+        # Create Pot object
+        logger.info("create pot....")
+        pot_defaults = {
+            "pot_factory_id": predecessorId,
+            "deployer": signer,
+            "deployed_at": created_at,
+            "source_metadata": data["source_metadata"],
+            "owner_id": data["owner"],
+            "chef_id": data.get("chef"),
+            "name": data["pot_name"],
+            "description": data["pot_description"],
+            "max_approved_applicants": data["max_projects"],
+            "base_currency": "near",
+            "application_start": datetime.fromtimestamp(
+                data["application_start_ms"] / 1000
+            ),
+            "application_end": datetime.fromtimestamp(
+                data["application_end_ms"] / 1000
+            ),
+            "matching_round_start": datetime.fromtimestamp(
+                data["public_round_start_ms"] / 1000
+            ),
+            "matching_round_end": datetime.fromtimestamp(
+                data["public_round_end_ms"] / 1000
+            ),
+            "registry_provider": data["registry_provider"],
+            "min_matching_pool_donation_amount": data[
+                "min_matching_pool_donation_amount"
+            ],
+            "sybil_wrapper_provider": data["sybil_wrapper_provider"],
+            "custom_sybil_checks": data.get("custom_sybil_checks"),
+            "custom_min_threshold_score": data.get("custom_min_threshold_score"),
+            "referral_fee_matching_pool_basis_points": data[
+                "referral_fee_matching_pool_basis_points"
+            ],
+            "referral_fee_public_round_basis_points": data[
+                "referral_fee_public_round_basis_points"
+            ],
+            "chef_fee_basis_points": data["chef_fee_basis_points"],
+            "total_matching_pool": "0",
+            "matching_pool_balance": "0",
+            "matching_pool_donations_count": 0,
+            "total_public_donations": "0",
+            "public_donations_count": 0,
+            "cooldown_period_ms": None,
+            "all_paid_out": False,
+            "protocol_config_provider": data["protocol_config_provider"],
+        }
+        potObject, _ = await Pot.objects.aupdate_or_create(
+            id=receiver, defaults=pot_defaults
+        )
+
+        # Add admins to the Pot
+        if data.get("admins"):
+            for admin_id in data["admins"]:
+                admin, _ = await Account.objects.aget_or_create(id=admin_id)
+                await potObject.admins.aadd(admin)
+
+        defaults = {
+            "signer_id": signerId,
+            "receiver_id": receiverId,
+            "timestamp": created_at,
+            "tx_hash": receiptId,
+        }
+
+        activity, activity_created = await Activity.objects.aupdate_or_create(
+            action_result=data, type="Deploy_Pot", defaults=defaults
+        )
+    except Exception as e:
+        logger.error(f"Failed to handle new pot, Error: {e}")
 
 
 async def handle_new_pot_factory(data: dict, receiverId: str, created_at: datetime):
-    logger.info("upserting accounts...")
+    try:
 
-    # Upsert accounts
-    owner, _ = await Account.objects.aget_or_create(
-        id=data["owner"],
-    )
-    protocol_fee_recipient_account, _ = await Account.objects.aget_or_create(
-        id=data["protocol_fee_recipient_account"],
-    )
+        logger.info("upserting accounts...")
 
-    receiver, _ = await Account.objects.aget_or_create(
-        id=receiverId,
-    )
+        # Upsert accounts
+        owner, _ = await Account.objects.aget_or_create(
+            id=data["owner"],
+        )
+        protocol_fee_recipient_account, _ = await Account.objects.aget_or_create(
+            id=data["protocol_fee_recipient_account"],
+        )
 
-    logger.info("creating factory....")
-    # Create Factory object
-    factory = await PotFactory.objects.acreate(
-        id=receiver,
-        owner=owner,
-        deployed_at=created_at,
-        source_metadata=data["source_metadata"],
-
protocol_fee_basis_points=data["protocol_fee_basis_points"], - protocol_fee_recipient=protocol_fee_recipient_account, - require_whitelist=data["require_whitelist"], - ) + receiver, _ = await Account.objects.aget_or_create( + id=receiverId, + ) - # Add admins to the PotFactory - if data.get("admins"): - for admin_id in data["admins"]: - admin, _ = await Account.objects.aget_or_create( - id=admin_id, - ) - await factory.admins.aadd(admin) + logger.info("creating factory....") + defaults = { + "owner": owner, + "deployed_at": created_at, + "source_metadata": data["source_metadata"], + "protocol_fee_basis_points": data["protocol_fee_basis_points"], + "protocol_fee_recipient": protocol_fee_recipient_account, + "require_whitelist": data["require_whitelist"], + } + # Create Factory object + factory, factory_created = await PotFactory.objects.aupdate_or_create( + id=receiver, defaults=defaults + ) - # Add whitelisted deployers to the PotFactory - if data.get("whitelisted_deployers"): - for deployer_id in data["whitelisted_deployers"]: - deployer, _ = await Account.objects.aget_or_create(id=deployer_id) - await factory.whitelisted_deployers.aadd(deployer) + # Add admins to the PotFactory + if data.get("admins"): + for admin_id in data["admins"]: + admin, _ = await Account.objects.aget_or_create( + id=admin_id, + ) + await factory.admins.aadd(admin) + + # Add whitelisted deployers to the PotFactory + if data.get("whitelisted_deployers"): + for deployer_id in data["whitelisted_deployers"]: + deployer, _ = await Account.objects.aget_or_create(id=deployer_id) + await factory.whitelisted_deployers.aadd(deployer) + except Exception as e: + logger.error(f"Failed to handle new pot Factory, Error: {e}") async def handle_new_list(signerId: str, receiverId: str, status_obj: ExecutionOutcome): # receipt = block.receipts().filter(receiptId=receiptId)[0] - data = json.loads( - base64.b64decode(status_obj.status.get("SuccessValue")).decode("utf-8") - ) + try: - logger.info(f"creating list..... {data}") - - listObject = await List.objects.acreate( - id=data["id"], - owner_id=data["owner"], - default_registration_status=data["default_registration_status"], - name=data["name"], - description=data["description"], - cover_image_url=data["cover_image_url"], - admin_only_registrations=data["admin_only_registrations"], - created_at=datetime.fromtimestamp(data["created_at"] / 1000), - updated_at=datetime.fromtimestamp(data["updated_at"] / 1000), - ) + data = json.loads( + base64.b64decode(status_obj.status.get("SuccessValue")).decode("utf-8") + ) - logger.info("upserting involveed accts...") + logger.info(f"creating list..... 
{data}") + + listObject = await List.objects.acreate( + id=data["id"], + owner_id=data["owner"], + default_registration_status=data["default_registration_status"], + name=data["name"], + description=data["description"], + cover_image_url=data["cover_image_url"], + admin_only_registrations=data["admin_only_registrations"], + created_at=datetime.fromtimestamp(data["created_at"] / 1000), + updated_at=datetime.fromtimestamp(data["updated_at"] / 1000), + ) - await Account.objects.aget_or_create(id=data["owner"]) + logger.info("upserting involveed accts...") - await Account.objects.aget_or_create(id=signerId) + await Account.objects.aget_or_create(id=data["owner"]) - await Account.objects.aget_or_create(id=receiverId) + await Account.objects.aget_or_create(id=signerId) - if data.get("admins"): - for admin_id in data["admins"]: - admin_object, _ = await Account.objects.aget_or_create( - id=admin_id, - ) - await listObject.admins.aadd(admin_object) + await Account.objects.aget_or_create(id=receiverId) + + if data.get("admins"): + for admin_id in data["admins"]: + admin_object, _ = await Account.objects.aget_or_create( + id=admin_id, + ) + await listObject.admins.aadd(admin_object) + except Exception as e: + logger.error(f"Failed to handle new list, Error: {e}") async def handle_new_list_registration( @@ -240,13 +264,15 @@ async def handle_new_list_registration( # Insert activity try: - await Activity.objects.acreate( - signer_id=signerId, - receiver_id=receiverId, - timestamp=insert_data[0]["submitted_at"], - type="Register_Batch", - action_result=reg_data, - tx_hash=receipt.receipt_id, + defaults = { + "signer_id": signerId, + "receiver_id": receiverId, + "timestamp": datetime.fromtimestamp(insert_data[0]["submitted_at"] / 1000), + "tx_hash": receipt.receipt_id, + } + + activity, activity_created = await Activity.objects.aupdate_or_create( + action_result=reg_data, type="Register_Batch", defaults=defaults ) except Exception as e: logger.error(f"Encountered error trying to insert activity: {e}") @@ -283,47 +309,61 @@ async def handle_pot_application( status_obj: ExecutionOutcome, created_at: datetime, ): - # receipt = block.receipts().filter(lambda receipt: receipt.receiptId == receiptId)[0] - result = status_obj.status.get("SuccessValue") - if not result: - return + try: - appl_data = json.loads(base64.b64decode(result).decode("utf-8")) - logger.info(f"new pot application data: {data}, {appl_data}") + # receipt = block.receipts().filter(lambda receipt: receipt.receiptId == receiptId)[0] + result = status_obj.status.get("SuccessValue") + if not result: + return - # Update or create the account - project, _ = await Account.objects.aget_or_create( - id=data["project_id"], - ) + appl_data = json.loads(base64.b64decode(result).decode("utf-8")) + logger.info(f"new pot application data: {data}, {appl_data}") - signer, _ = await Account.objects.aget_or_create( - id=signerId, - ) + # Update or create the account + project, _ = await Account.objects.aget_or_create( + id=data["project_id"], + ) - # Create the PotApplication object - logger.info("creating application.......") - application = await PotApplication.objects.acreate( - pot_id=receiverId, - applicant=project, - message=appl_data["message"], - submitted_at=datetime.fromtimestamp(appl_data["submitted_at"] / 1000), - updated_at=created_at, - status=appl_data["status"], - tx_hash=receipt.receipt_id, - ) + signer, _ = await Account.objects.aget_or_create( + id=signerId, + ) - # Create the activity object - logger.info("creating activity for 
action....") - await Activity.objects.acreate( - signer=signer, - receiver_id=receiverId, - timestamp=application.submitted_at, - type="Submit_Application", - action_result=appl_data, - tx_hash=receipt.receipt_id, - ) + # Create the PotApplication object + logger.info("creating application.......") + appl_defaults = { + "message": appl_data["message"], + "submitted_at": datetime.fromtimestamp(appl_data["submitted_at"] / 1000), + "updated_at": created_at, + "status": appl_data["status"], + "tx_hash": receipt.receipt_id, + } + application, application_created = ( + await PotApplication.objects.aupdate_or_create( + applicant=project, + pot_id=receiverId, + defaults=appl_defaults, + ) + ) - logger.info("PotApplication and Activity created successfully.") + # Create the activity object + logger.info("creating activity for action....") + + defaults = { + "signer_id": signerId, + "receiver_id": receiverId, + "timestamp": created_at, + "tx_hash": receipt.receipt_id, + } + + activity, activity_created = await Activity.objects.aupdate_or_create( + action_result=appl_data, type="Submit_Application", defaults=defaults + ) + + logger.info( + f"PotApplication and Activity created successfully, {activity_created}" + ) + except Exception as e: + logger.error(f"Failed to handle pot application, Error: {e}") async def handle_pot_application_status_change( @@ -333,166 +373,227 @@ async def handle_pot_application_status_change( receipt: Receipt, status_obj: ExecutionOutcome, ): - logger.info(f"pot application update data: {data}, {receiverId}") + try: - # receipt = next(receipt for receipt in block.receipts() if receipt.receiptId == receiptId) - update_data = json.loads( - base64.b64decode(status_obj.status["SuccessValue"]).decode("utf-8") - ) + logger.info(f"pot application update data: {data}, {receiverId}") - # Retrieve the PotApplication object - appl = await PotApplication.objects.filter( - applicant_id=data["project_id"] - ).afirst() # TODO: handle this being None - - # Create the PotApplicationReview object - logger.info(f"create review...... {appl}") - updated_at = datetime.fromtimestamp(update_data.get("updated_at") / 1000) - await PotApplicationReview.objects.acreate( - application_id=appl.id, - reviewer_id=signerId, - notes=update_data.get("review_notes"), - status=update_data["status"], - reviewed_at=updated_at, - tx_hash=receipt.receipt_id, - ) + # receipt = next(receipt for receipt in block.receipts() if receipt.receiptId == receiptId) + update_data = json.loads( + base64.b64decode(status_obj.status["SuccessValue"]).decode("utf-8") + ) - # Update the PotApplication object - await PotApplication.objects.filter(applicant_id=data["project_id"]).aupdate( - **{"status": update_data["status"], "updated_at": updated_at} - ) + # Retrieve the PotApplication object + appl = await PotApplication.objects.filter( + applicant_id=data["project_id"] + ).afirst() # TODO: handle this being None + + # Create the PotApplicationReview object + logger.info(f"create review...... 
{appl}") + updated_at = datetime.fromtimestamp(update_data.get("updated_at") / 1000) + + defaults = { + "notes": update_data.get("review_notes"), + "status": update_data["status"], + "tx_hash": receipt.receipt_id, + } + + await PotApplicationReview.objects.aupdate_or_create( + application_id=appl.id, + reviewer_id=signerId, + reviewed_at=updated_at, + defaults=defaults, + ) - logger.info("PotApplicationReview and PotApplication updated successfully.") + # Update the PotApplication object + await PotApplication.objects.filter(applicant_id=data["project_id"]).aupdate( + **{"status": update_data["status"], "updated_at": updated_at} + ) + + logger.info("PotApplicationReview and PotApplication updated successfully.") + except Exception as e: + logger.warning(f"Failed to change pot application status, Error: {e}") async def handle_default_list_status_change( data: dict, receiverId: str, status_obj: ExecutionOutcome ): - logger.info(f"update project data: {data}, {receiverId}") + try: - result_data = json.loads( - base64.b64decode(status_obj.status.get("SuccessValue")).decode("utf-8") - ) + logger.info(f"update project data: {data}, {receiverId}") - list_id = data.get("registration_id") - list_update = { - "name": result_data["name"], - "owner_id": result_data["owner"], - "default_registration_status": result_data["default_registration_status"], - "admin_only_registrations": result_data["admin_only_registrations"], - "updated_at": result_data["updated_at"], - } - if result_data.get("description"): - list_update["description"] = result_data["description"] - if result_data.get("cover_image_url"): - list_update["cover_image_url"] = result_data["cover_image_url"] + result_data = json.loads( + base64.b64decode(status_obj.status.get("SuccessValue")).decode("utf-8") + ) - await List.objects.filter(id=list_id).aupdate(**list_update) + list_id = data.get("registration_id") + list_update = { + "name": result_data["name"], + "owner_id": result_data["owner"], + "default_registration_status": result_data["default_registration_status"], + "admin_only_registrations": result_data["admin_only_registrations"], + "updated_at": result_data["updated_at"], + } + if result_data.get("description"): + list_update["description"] = result_data["description"] + if result_data.get("cover_image_url"): + list_update["cover_image_url"] = result_data["cover_image_url"] - logger.info("List updated successfully.") + await List.objects.filter(id=list_id).aupdate(**list_update) + + logger.info("List updated successfully.") + except Exception as e: + logger.warning(f"Failed to change list status, Error: {e}") async def handle_list_upvote( data: dict, receiverId: str, signerId: str, receiptId: str ): - logger.info(f"upvote list: {data}, {receiverId}") + try: - acct, _ = await Account.objects.aget_or_create( - id=signerId, - ) + logger.info(f"upvote list: {data}, {receiverId}") - created_at = datetime.now() + acct, _ = await Account.objects.aget_or_create( + id=signerId, + ) - await ListUpvote.objects.acreate( - list_id=data.get("list_id") or receiverId, - account_id=signerId, - created_at=created_at, - ) + created_at = datetime.now() - await Activity.objects.acreate( - signer_id=signerId, - receiver_id=receiverId, - timestamp=created_at, - type="Upvote", - action_result=data, - tx_hash=receiptId, - ) + await ListUpvote.objects.aupdate_or_create( + list_id=data.get("list_id") or receiverId, + account_id=signerId, + ) + + defaults = { + "signer_id": signerId, + "receiver_id": receiverId, + "timestamp": created_at, + "tx_hash": 
receiptId, + } + + activity, activity_created = await Activity.objects.aupdate_or_create( + action_result=data, type="Upvote", defaults=defaults + ) - logger.info("Upvote and activity records created successfully.") + logger.info( + f"Upvote and activity records created successfully. {activity_created}" + ) + except Exception as e: + logger.warning(f"Failed to upvote list, Error: {e}") async def handle_set_payouts(data: dict, receiverId: str, receipt: Receipt): - logger.info(f"set payout data: {data}, {receiverId}") - payouts = data.get("payouts", []) - - insertion_data = [] - for payout in payouts: - # General question: should we register projects as accounts? - potPayout = { - "recipient_id": payout.get("project_id"), - "amount": payout.get("amount"), - "ft_id": payout.get("ft_id", "near"), - "tx_hash": receipt.receipt_id, - } - insertion_data.append(potPayout) + try: + + logger.info(f"set payout data: {data}, {receiverId}") + payouts = data.get("payouts", []) + + insertion_data = [] + for payout in payouts: + # General question: should we register projects as accounts? + potPayout = { + "recipient_id": payout.get("project_id"), + "amount": payout.get("amount"), + "ft_id": payout.get("ft_id", "near"), + "tx_hash": receipt.receipt_id, + } + insertion_data.append(potPayout) - await PotPayout.objects.abulk_create(insertion_data) + await PotPayout.objects.abulk_create(insertion_data, ignore_conflicts=True) + except Exception as e: + logger.warning(f"Failed to set payouts, Error: {e}") async def handle_transfer_payout( data: dict, receiverId: str, receiptId: str, created_at: datetime ): - data = data["payout"] - logger.info(f"fulfill payout data: {data}, {receiverId}") - payout = { - "recipient_id": data["project_id"], - "amount": data["amount"], - "paid_at": data.get("paid_at", created_at), - "tx_hash": receiptId, - } - await PotPayout.objects.filter(recipient_id=data["project_id"]).aupdate(**payout) + try: + + data = data["payout"] + logger.info(f"fulfill payout data: {data}, {receiverId}") + payout = { + "recipient_id": data["project_id"], + "amount": data["amount"], + "paid_at": data.get("paid_at", created_at), + "tx_hash": receiptId, + } + await PotPayout.objects.filter(recipient_id=data["project_id"]).aupdate( + **payout + ) + except Exception as e: + logger.warning(f"Failed to create payout data, Error: {e}") async def handle_payout_challenge( - data: dict, receiverId: str, signerId: str, receiptId: str + data: dict, receiverId: str, signerId: str, receiptId: str, created_at: datetime ): - logger.info(f"challenging payout..: {data}, {receiverId}") - created_at = datetime.now() - payoutChallenge = { - "challenger_id": signerId, - "pot_id": receiverId, - "created_at": created_at, - "message": data["reason"], - "tx_hash": receiptId, - } - await PotPayoutChallenge.objects.acreate(**payoutChallenge) - - await Activity.objects.acreate( - signer_id=signerId, - receiver_id=receiverId, - timestamp=created_at, - type="Challenge_Payout", - action_result=payoutChallenge, - tx_hash=receiptId, - ) + try: + + logger.info(f"challenging payout..: {data}, {receiverId}") + payoutChallenge = { + "created_at": created_at, + "message": data["reason"], + "tx_hash": receiptId, + } + await PotPayoutChallenge.objects.aupdate_or_create( + challenger_id=signerId, pot_id=receiverId, defaults=payoutChallenge + ) + + defaults = { + "signer_id": signerId, + "receiver_id": receiverId, + "timestamp": created_at, + "tx_hash": receiptId, + } + + activity, activity_created = await Activity.objects.aupdate_or_create( + 
action_result=payoutChallenge, type="Challenge_Payout", defaults=defaults
+        )
+    except Exception as e:
+        logger.warning(f"Failed to create payout challenge, Error: {e}")
+
+
+async def handle_payout_challenge_response(
+    data: dict, receiverId: str, signerId: str, receiptId: str, created_at: datetime
+):
+    try:
+        logger.info(f"responding to payout challenge..: {data}, {receiverId}")
+        response_defaults = {
+            "admin_id": signerId,
+            "message": data.get("notes"),
+            "resolved": data.get("resolve_challenge"),
+            "tx_hash": receiptId,
+        }
+        await PotPayoutChallengeAdminResponse.objects.aupdate_or_create(
+            challenger_id=data["challenger_id"],
+            pot_id=receiverId,
+            created_at=created_at,
+            defaults=response_defaults,
+        )
+    except Exception as e:
+        logger.error(f"Failed to handle admin challenge response, Error: {e}")
 
 
 async def handle_list_admin_removal(data, receiverId, signerId, receiptId):
-    logger.info(f"removing admin...: {data}, {receiverId}")
-    list_obj = await List.objects.aget(id=data["list_id"])
+    try:
 
-    for acct in data["admins"]:
-        list_obj.admins.remove({"admins_id": acct})  # ??
+        logger.info(f"removing admin...: {data}, {receiverId}")
+        list_obj = await List.objects.aget(id=data["list_id"])
 
-    activity = {
-        "signer_id": signerId,
-        "receiver_id": receiverId,
-        "timestamp": datetime.now(),
-        "type": "Remove_List_Admin",
-        "tx_hash": receiptId,
-    }
+        for acct in data["admins"]:
+            list_obj.admins.remove(acct)  # remove by account id (pk)
 
-    await Activity.objects.acreate(**activity)
+        activity = {
+            "signer_id": signerId,
+            "receiver_id": receiverId,
+            "timestamp": datetime.now(),
+            "tx_hash": receiptId,
+        }
+
+        activity, activity_created = await Activity.objects.aupdate_or_create(
+            type="Remove_List_Admin", defaults=activity
+        )
+    except Exception as e:
+        logger.warning(f"Failed to remove list admin, Error: {e}")
 
 
 # TODO: Need to abstract some actions.
@@ -559,28 +660,32 @@ async def handle_new_donations( (donation_data.get("donated_at") or donation_data.get("donated_at_ms")) / 1000 ) - # Upsert donor account - donor, _ = await Account.objects.aget_or_create(id=donation_data["donor_id"]) - recipient = None + try: - if donation_data.get("recipient_id"): - recipient, _ = await Account.objects.aget_or_create( - id=donation_data["recipient_id"] - ) - if donation_data.get("project_id"): - recipient, _ = await Account.objects.aget_or_create( - id=donation_data["project_id"] - ) + # Upsert donor account + donor, _ = await Account.objects.aget_or_create(id=donation_data["donor_id"]) + recipient = None - if donation_data.get("referrer_id"): - referrer, _ = await Account.objects.aget_or_create( - id=donation_data["referrer_id"] - ) + if donation_data.get("recipient_id"): + recipient, _ = await Account.objects.aget_or_create( + id=donation_data["recipient_id"] + ) + if donation_data.get("project_id"): + recipient, _ = await Account.objects.aget_or_create( + id=donation_data["project_id"] + ) - # Upsert token account - token_acct, _ = await Account.objects.aget_or_create( - id=(donation_data.get("ft_id") or "near") - ) + if donation_data.get("referrer_id"): + referrer, _ = await Account.objects.aget_or_create( + id=donation_data["referrer_id"] + ) + + # Upsert token account + token_acct, _ = await Account.objects.aget_or_create( + id=(donation_data.get("ft_id") or "near") + ) + except Exception as e: + logger.warning(f"Failed to create/get an account involved in donation: {e}") # # Upsert token # try: @@ -630,69 +735,78 @@ async def handle_new_donations( # total_amount_usd = None if not unit_price else unit_price * total_near_amount # net_amount_usd = None if not unit_price else unit_price * net_near_amount - total_amount = donation_data["total_amount"] - - logger.info(f"inserting donations... by {actionName}") - default_data = { - "donor": donor, - "total_amount": total_amount, - "total_amount_usd": None, # USD amounts will be added later (could be in pre-save hook) - "net_amount_usd": None, - "net_amount": net_amount, - "ft": token_acct, - "message": donation_data.get("message"), - "donated_at": donated_at, - "matching_pool": donation_data.get("matching_pool", False), - "recipient": recipient, - "protocol_fee": donation_data["protocol_fee"], - "referrer": referrer if donation_data.get("referrer_id") else None, - "referrer_fee": donation_data.get("referrer_fee"), - "tx_hash": receipt_obj.receipt_id, - } - logger.info(f"default donation data: {default_data}") + try: - if actionName != "direct": - logger.info("selecting pot to make public donation update") - pot = await Pot.objects.aget(id=receiverId) - default_data["pot"] = pot + total_amount = donation_data["total_amount"] + + logger.info(f"inserting donations... 
by {actionName}") + default_data = { + "donor": donor, + "total_amount": total_amount, + "total_amount_usd": None, # USD amounts will be added later (could be in pre-save hook) + "net_amount_usd": None, + "net_amount": net_amount, + "ft": token_acct, + "message": donation_data.get("message"), + "donated_at": donated_at, + "matching_pool": donation_data.get("matching_pool", False), + "recipient": recipient, + "protocol_fee": donation_data["protocol_fee"], + "referrer": referrer if donation_data.get("referrer_id") else None, + "referrer_fee": donation_data.get("referrer_fee"), + "tx_hash": receipt_obj.receipt_id, + } + logger.info(f"default donation data: {default_data}") - donation, donation_created = await Donation.objects.aupdate_or_create( - on_chain_id=donation_data["id"], defaults={}, create_defaults=default_data - ) - logger.info(f"Created donation? {donation_created}") - - # fetch USD prices - await donation.fetch_usd_prices_async() # might not need to await this? - - # # convert total_amount_usd and net_amount_usd from None - # if total_amount_usd is None: - # total_amount_usd = 0.0 - # if net_amount_usd is None: - # net_amount_usd = 0.0 - - # Insert or update activity record - activity_type = ( - "Donate_Direct" - if actionName == "direct" - else ( - "Donate_Pot_Matching_Pool" - if donation.matching_pool - else "Donate_Pot_Public" + if actionName != "direct": + logger.info("selecting pot to make public donation update") + pot = await Pot.objects.aget(id=receiverId) + default_data["pot"] = pot + + donation, donation_created = await Donation.objects.aupdate_or_create( + on_chain_id=donation_data["id"], defaults={}, create_defaults=default_data ) - ) - defaults = { - "signer_id": signerId, - "receiver_id": receiverId, - "timestamp": donation.donated_at, - "tx_hash": receipt_obj.receipt_id, - } - activity, activity_created = await Activity.objects.aupdate_or_create( - action_result=donation_data, type=activity_type, defaults=defaults - ) - if activity_created: - logger.info(f"Activity created: {activity}") - else: - logger.info(f"Activity updated: {activity}") + logger.info(f"Created donation? {donation_created}") + + # fetch USD prices + await donation.fetch_usd_prices_async() # might not need to await this? + + # # convert total_amount_usd and net_amount_usd from None + # if total_amount_usd is None: + # total_amount_usd = 0.0 + # if net_amount_usd is None: + # net_amount_usd = 0.0 + + logger.info(f"Created donation? 
{donation_created}") + # Insert or update activity record + activity_type = ( + "Donate_Direct" + if actionName == "direct" + else ( + "Donate_Pot_Matching_Pool" + if donation.matching_pool + else "Donate_Pot_Public" + ) + ) + defaults = { + "signer_id": signerId, + "receiver_id": receiverId, + "timestamp": donation.donated_at, + "tx_hash": receipt_obj.receipt_id, + } + try: + + activity, activity_created = await Activity.objects.aupdate_or_create( + action_result=donation_data, type=activity_type, defaults=defaults + ) + if activity_created: + logger.info(f"Activity created: {activity}") + else: + logger.info(f"Activity updated: {activity}") + except Exception as e: + logger.info(f"Failed to create Activity: {e}") + except Exception as e: + logger.warning(f"Failed to create/update donation: {e}") ### COMMENTING OUT FOR NOW SINCE WE HAVE PERIODIC JOB RUNNING TO UPDATE ACCOUNT STATS (NB: DOESN'T CURRENTLY COVER POT STATS) ### CAN ALWAYS ADD BACK IF DESIRED diff --git a/lists/migrations/0002_alter_listupvote_options_and_more.py b/lists/migrations/0002_alter_listupvote_options_and_more.py new file mode 100644 index 0000000..af8f5c2 --- /dev/null +++ b/lists/migrations/0002_alter_listupvote_options_and_more.py @@ -0,0 +1,22 @@ +# Generated by Django 5.0.4 on 2024-05-08 15:13 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ("accounts", "0001_initial"), + ("lists", "0001_initial"), + ] + + operations = [ + migrations.AlterModelOptions( + name="listupvote", + options={"verbose_name_plural": "ListUpvotes"}, + ), + migrations.AlterUniqueTogether( + name="listupvote", + unique_together={("list", "account")}, + ), + ] diff --git a/lists/models.py b/lists/models.py index a3e93df..7732f1e 100644 --- a/lists/models.py +++ b/lists/models.py @@ -101,6 +101,11 @@ class ListUpvote(models.Model): help_text=_("Upvote creation date."), ) + class Meta: + verbose_name_plural = "ListUpvotes" + + unique_together = (("list", "account"),) + class ListRegistration(models.Model): id = models.AutoField( diff --git a/pots/admin.py b/pots/admin.py index 24862fa..d49ee44 100644 --- a/pots/admin.py +++ b/pots/admin.py @@ -1,5 +1,14 @@ from django.contrib import admin -from .models import PotFactory, Pot, PotApplication, PotPayout, PotPayoutChallenge, PotPayoutChallengeAdminResponse + +from .models import ( + Pot, + PotApplication, + PotFactory, + PotPayout, + PotPayoutChallenge, + PotPayoutChallengeAdminResponse, +) + @admin.register(PotFactory) class PotFactoryAdmin(admin.ModelAdmin): @@ -32,6 +41,6 @@ class PotPayoutChallengeAdmin(admin.ModelAdmin): @admin.register(PotPayoutChallengeAdminResponse) class PotPayoutChallengeAdminResponseAdmin(admin.ModelAdmin): - list_display = ('id', 'challenge', 'admin', 'created_at', 'resolved') + list_display = ('id', 'pot', 'admin', 'created_at', 'resolved') search_fields = ('admin__id', 'challenge__id') list_filter = ('created_at', 'resolved') diff --git a/pots/migrations/0002_alter_potapplication_options_and_more.py b/pots/migrations/0002_alter_potapplication_options_and_more.py new file mode 100644 index 0000000..60f1eed --- /dev/null +++ b/pots/migrations/0002_alter_potapplication_options_and_more.py @@ -0,0 +1,73 @@ +# Generated by Django 5.0.4 on 2024-05-09 12:44 + +import django.db.models.deletion +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("accounts", "0001_initial"), + ("pots", "0001_initial"), + ] + + operations = [ + migrations.AlterModelOptions( + 
name="potapplication", + options={"verbose_name_plural": "PotApplications"}, + ), + migrations.AlterModelOptions( + name="potapplicationreview", + options={"verbose_name_plural": "PotApplicationReviews"}, + ), + migrations.AlterModelOptions( + name="potpayoutchallenge", + options={"verbose_name_plural": "PayoutChallenges"}, + ), + migrations.AlterModelOptions( + name="potpayoutchallengeadminresponse", + options={"verbose_name_plural": "PotPayoutChallengeResponses"}, + ), + migrations.AddField( + model_name="potpayoutchallengeadminresponse", + name="challenger", + field=models.ForeignKey( + help_text="challenger being responded to.", + null=True, + on_delete=django.db.models.deletion.CASCADE, + related_name="payout_admin_responses", + to="accounts.account", + ), + ), + migrations.AddField( + model_name="potpayoutchallengeadminresponse", + name="pot", + field=models.ForeignKey( + help_text="Pot being challenged.", + null=True, + on_delete=django.db.models.deletion.CASCADE, + related_name="payout_responses", + to="pots.pot", + ), + ), + migrations.AlterUniqueTogether( + name="potpayoutchallengeadminresponse", + unique_together={("challenger", "pot", "created_at")}, + ), + migrations.AlterUniqueTogether( + name="potapplication", + unique_together={("pot", "applicant")}, + ), + migrations.AlterUniqueTogether( + name="potapplicationreview", + unique_together={("application", "reviewer", "reviewed_at")}, + ), + migrations.AlterUniqueTogether( + name="potpayoutchallenge", + unique_together={("challenger", "pot")}, + ), + migrations.RemoveField( + model_name="potpayoutchallengeadminresponse", + name="challenge", + ), + ] diff --git a/pots/models.py b/pots/models.py index 8941bec..b216713 100644 --- a/pots/models.py +++ b/pots/models.py @@ -332,6 +332,11 @@ class PotApplication(models.Model): help_text=_("Transaction hash."), ) + class Meta: + verbose_name_plural = "PotApplications" + + unique_together = (("pot", "applicant"),) + class PotApplicationReview(models.Model): id = models.AutoField( @@ -378,6 +383,11 @@ class PotApplicationReview(models.Model): help_text=_("Transaction hash."), ) + class Meta: + verbose_name_plural = "PotApplicationReviews" + + unique_together = (("application", "reviewer", "reviewed_at"),) + class PotPayout(models.Model): id = models.AutoField( @@ -468,6 +478,11 @@ class PotPayoutChallenge(models.Model): help_text=_("Challenge message."), ) + class Meta: + verbose_name_plural = "PayoutChallenges" + + unique_together = (("challenger", "pot"),) + class PotPayoutChallengeAdminResponse(models.Model): id = models.AutoField( @@ -475,13 +490,22 @@ class PotPayoutChallengeAdminResponse(models.Model): primary_key=True, help_text=_("Admin response id."), ) - challenge = models.ForeignKey( - PotPayoutChallenge, + challenger = models.ForeignKey( + Account, on_delete=models.CASCADE, - related_name="admin_responses", - null=False, - help_text=_("Challenge responded to."), + related_name="payout_admin_responses", + null=True, + help_text=_("challenger being responded to."), ) + + pot = models.ForeignKey( + Pot, + on_delete=models.CASCADE, + related_name="payout_responses", + null=True, + help_text=_("Pot being challenged."), + ) + admin = models.ForeignKey( Account, on_delete=models.CASCADE, @@ -511,3 +535,8 @@ class PotPayoutChallengeAdminResponse(models.Model): null=False, help_text=_("Transaction hash."), ) + + class Meta: + verbose_name_plural = "PotPayoutChallengeResponses" + + unique_together = (("challenger", "pot", "created_at"),) From a7de2d4c2ca5c9110d4784059cc15382a448eb38 
Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Fri, 10 May 2024 10:57:17 -0400 Subject: [PATCH 082/127] restart at block 106388588 --- indexer_app/tasks.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/indexer_app/tasks.py b/indexer_app/tasks.py index 8272711..f68814d 100644 --- a/indexer_app/tasks.py +++ b/indexer_app/tasks.py @@ -57,8 +57,8 @@ def listen_to_near_events(): try: # Update below with desired network & block height - start_block = get_block_height("current_block_height") - # start_block = 105_854_538 # manually setting for reindexing TODO: remove this + # start_block = get_block_height("current_block_height") + start_block = 106388590 # manually setting for reindexing TODO: remove this logger.info(f"what's the start block, pray tell? {start_block-1}") loop.run_until_complete(indexer("mainnet", start_block - 1, None)) finally: From 1959ca0a5cc2f4989260761ef431540091e994b1 Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Fri, 10 May 2024 10:57:45 -0400 Subject: [PATCH 083/127] restart at block 106388588 --- indexer_app/tasks.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/indexer_app/tasks.py b/indexer_app/tasks.py index f68814d..b05a3be 100644 --- a/indexer_app/tasks.py +++ b/indexer_app/tasks.py @@ -58,7 +58,7 @@ def listen_to_near_events(): try: # Update below with desired network & block height # start_block = get_block_height("current_block_height") - start_block = 106388590 # manually setting for reindexing TODO: remove this + start_block = 106388588 # manually setting for reindexing TODO: remove this logger.info(f"what's the start block, pray tell? {start_block-1}") loop.run_until_complete(indexer("mainnet", start_block - 1, None)) finally: From d6534988ce5ff1d95cfd6c81f2f7c7f4f33a09fa Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Fri, 10 May 2024 11:03:28 -0400 Subject: [PATCH 084/127] restart indexer at 106431915 --- indexer_app/tasks.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/indexer_app/tasks.py b/indexer_app/tasks.py index b05a3be..0219603 100644 --- a/indexer_app/tasks.py +++ b/indexer_app/tasks.py @@ -58,7 +58,7 @@ def listen_to_near_events(): try: # Update below with desired network & block height # start_block = get_block_height("current_block_height") - start_block = 106388588 # manually setting for reindexing TODO: remove this + start_block = 106431915 # manually setting for reindexing TODO: remove this logger.info(f"what's the start block, pray tell? {start_block-1}") loop.run_until_complete(indexer("mainnet", start_block - 1, None)) finally: From aa6a4670d9ea422250cba393afbc349ce81d3876 Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Fri, 10 May 2024 11:06:00 -0400 Subject: [PATCH 085/127] restart at 106435734 --- indexer_app/tasks.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/indexer_app/tasks.py b/indexer_app/tasks.py index 0219603..7c7a402 100644 --- a/indexer_app/tasks.py +++ b/indexer_app/tasks.py @@ -58,7 +58,7 @@ def listen_to_near_events(): try: # Update below with desired network & block height # start_block = get_block_height("current_block_height") - start_block = 106431915 # manually setting for reindexing TODO: remove this + start_block = 106435734 # manually setting for reindexing TODO: remove this logger.info(f"what's the start block, pray tell? 
{start_block-1}") loop.run_until_complete(indexer("mainnet", start_block - 1, None)) finally: From 875bdb7615700a46466328591c31b5ca9ffcc57c Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Mon, 13 May 2024 10:31:38 -0400 Subject: [PATCH 086/127] add pattern matching for pot subaccount --- indexer_app/handler.py | 23 ++++++++++++++++++----- pots/utils.py | 11 ++++++++++- 2 files changed, 28 insertions(+), 6 deletions(-) diff --git a/indexer_app/handler.py b/indexer_app/handler.py index 5c251cf..c0c67af 100644 --- a/indexer_app/handler.py +++ b/indexer_app/handler.py @@ -6,7 +6,10 @@ from near_lake_framework import near_primitives from base.utils import convert_ns_to_utc -from pots.utils import match_pot_factory_version_pattern +from pots.utils import ( + match_pot_factory_version_pattern, + match_pot_subaccount_version_pattern, +) from .logging import logger from .utils import ( @@ -129,7 +132,9 @@ async def handle_streamer_message(streamer_message: near_primitives.StreamerMess await handle_new_pot_factory( args_dict, receiver_id, created_at ) - else: + elif match_pot_subaccount_version_pattern( + receipt.receiver_id + ): logger.info( f"new pot deployment: {args_dict}, {action}" ) @@ -271,14 +276,22 @@ async def handle_streamer_message(streamer_message: near_primitives.StreamerMess case "challenge_payouts": logger.info(f"challenge payout: {args_dict}") await handle_payout_challenge( - args_dict, receiver_id, signer_id, receipt.receipt_id, created_at + args_dict, + receiver_id, + signer_id, + receipt.receipt_id, + created_at, ) break - + case "admin_update_payouts_challenge": logger.info(f"challenge payout: {args_dict}") await handle_payout_challenge_response( - args_dict, receiver_id, signer_id, receipt.receipt_id, created_at + args_dict, + receiver_id, + signer_id, + receipt.receipt_id, + created_at, ) break diff --git a/pots/utils.py b/pots/utils.py index 30d4c14..01bfab6 100644 --- a/pots/utils.py +++ b/pots/utils.py @@ -1,6 +1,15 @@ import re +BASE_PATTERN = r"v\d+\.potfactory\.potlock\.near$" + def match_pot_factory_version_pattern(receiver): - pattern = r"^v\d+\.potfactory\.potlock\.near$" + """Matches the base pot factory version pattern without a subaccount.""" + pattern = f"^{BASE_PATTERN}" + return bool(re.match(pattern, receiver)) + + +def match_pot_subaccount_version_pattern(receiver): + """Matches the pot factory version pattern with a subaccount.""" + pattern = f"^[a-zA-Z0-9_]+\.{BASE_PATTERN}" return bool(re.match(pattern, receiver)) From fd0e2d2942e3e8cbdffefc0ad1ac666a930d33b4 Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Tue, 14 May 2024 20:59:09 -0400 Subject: [PATCH 087/127] add cors headers & localhost support --- base/settings.py | 6 ++++++ indexer_app/tasks.py | 2 +- pyproject.toml | 1 + 3 files changed, 8 insertions(+), 1 deletion(-) diff --git a/base/settings.py b/base/settings.py index 02a6664..698936e 100644 --- a/base/settings.py +++ b/base/settings.py @@ -67,6 +67,7 @@ "django.contrib.messages", "django.contrib.staticfiles", "rest_framework", + "corsheaders", # "cachalot", "celery", "api", @@ -88,6 +89,7 @@ MIDDLEWARE = [ + "corsheaders.middleware.CorsMiddleware", "django.middleware.security.SecurityMiddleware", "django.contrib.sessions.middleware.SessionMiddleware", "django.middleware.common.CommonMiddleware", @@ -117,6 +119,10 @@ WSGI_APPLICATION = "base.wsgi.application" +CORS_ALLOWED_ORIGINS = [ + "http://localhost:3000", +] + # REDIS / CACHE CONFIGS 
REDIS_SCHEMA = ( diff --git a/indexer_app/tasks.py b/indexer_app/tasks.py index 7c7a402..b5fa879 100644 --- a/indexer_app/tasks.py +++ b/indexer_app/tasks.py @@ -58,7 +58,7 @@ def listen_to_near_events(): try: # Update below with desired network & block height # start_block = get_block_height("current_block_height") - start_block = 106435734 # manually setting for reindexing TODO: remove this + start_block = 111854642 logger.info(f"what's the start block, pray tell? {start_block-1}") loop.run_until_complete(indexer("mainnet", start_block - 1, None)) finally: diff --git a/pyproject.toml b/pyproject.toml index 2dd543a..1bb5530 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -19,6 +19,7 @@ django-redis = "^5.4.0" gunicorn = "^22.0.0" sentry-sdk = {extras = ["django"], version = "^1.45.0"} watchtower = "^3.1.0" +django-cors-headers = "^4.3.1" [tool.poetry.group.dev.dependencies] black = "^24.3.0" From 137e348c09673001b3e0cf4291f8275e6dc04cb1 Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Tue, 14 May 2024 21:08:52 -0400 Subject: [PATCH 088/127] fix virtual env usage in after_install.sh --- scripts/after_install.sh | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/scripts/after_install.sh b/scripts/after_install.sh index 4ffdb25..a52c69c 100755 --- a/scripts/after_install.sh +++ b/scripts/after_install.sh @@ -43,23 +43,23 @@ echo "Migration check output: $PENDING_MIGRATIONS" >> "$LOG_FILE" # Log the full output of showmigrations echo "Checking for pending migrations..." >> "$LOG_FILE" -python manage.py showmigrations >> "$LOG_FILE" 2>&1 # Logging full output to diagnose +poetry run python manage.py showmigrations >> "$LOG_FILE" 2>&1 # Logging full output to diagnose # Check for unapplied migrations -PENDING_MIGRATIONS=$(python manage.py showmigrations | grep "\[ \]" | wc -l) # Count unapplied migrations +PENDING_MIGRATIONS=$(poetry run python manage.py showmigrations | grep "\[ \]" | wc -l) # Count unapplied migrations if [ "$PENDING_MIGRATIONS" -gt 0 ]; then echo "Migrations found; stopping services..." >> "$LOG_FILE" sudo systemctl stop gunicorn celery-indexer-worker celery-beat-worker celery-beat echo 'Applying migrations...' >> "$LOG_FILE" - python manage.py migrate >> "$LOG_FILE" 2>&1 + poetry run python manage.py migrate >> "$LOG_FILE" 2>&1 echo 'Starting services...' >> "$LOG_FILE" sudo systemctl start gunicorn celery-indexer-worker celery-beat-worker celery-beat else echo 'No migrations found. Running collectstatic and restarting services...' 
>> "$LOG_FILE" - python manage.py collectstatic --noinput >> "$LOG_FILE" 2>&1 + poetry run python manage.py collectstatic --noinput >> "$LOG_FILE" 2>&1 sudo systemctl restart gunicorn celery-indexer-worker celery-beat-worker celery-beat fi From 466c0936ac521a7c7be04be5384ad5cce7b36058 Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Tue, 14 May 2024 21:13:07 -0400 Subject: [PATCH 089/127] update after_install.sh --- scripts/after_install.sh | 80 +++++++++++++++++++++++++++++++++++----- 1 file changed, 70 insertions(+), 10 deletions(-) diff --git a/scripts/after_install.sh b/scripts/after_install.sh index a52c69c..4b921ba 100755 --- a/scripts/after_install.sh +++ b/scripts/after_install.sh @@ -28,22 +28,14 @@ PROJECT_DIR="/home/ec2-user/django-indexer" # Navigate to the project directory cd "$PROJECT_DIR" -# Source the specific poetry virtual environment -source "/home/ec2-user/.cache/pypoetry/virtualenvs/django-indexer-Y-SQFfhb-py3.11/bin/activate" - # Install dependencies using Poetry echo "$(date '+%Y-%m-%d %H:%M:%S') - Installing dependencies with Poetry" >> "$LOG_FILE" poetry install >> "$LOG_FILE" echo "$(date '+%Y-%m-%d %H:%M:%S') - Dependencies installed" >> "$LOG_FILE" -# Check if there are pending migrations and log the output -echo "Checking for pending migrations..." >> "$LOG_FILE" -PENDING_MIGRATIONS=$(python manage.py showmigrations | grep '\[ \]' 2>&1) # Redirect stderr to stdout -echo "Migration check output: $PENDING_MIGRATIONS" >> "$LOG_FILE" - -# Log the full output of showmigrations +# Log the full output of showmigrations to diagnose echo "Checking for pending migrations..." >> "$LOG_FILE" -poetry run python manage.py showmigrations >> "$LOG_FILE" 2>&1 # Logging full output to diagnose +poetry run python manage.py showmigrations >> "$LOG_FILE" 2>&1 # Check for unapplied migrations PENDING_MIGRATIONS=$(poetry run python manage.py showmigrations | grep "\[ \]" | wc -l) # Count unapplied migrations @@ -64,3 +56,71 @@ else fi echo "$(date '+%Y-%m-%d %H:%M:%S') - after_install.sh completed" >> "$LOG_FILE" + + +# #!/bin/bash +# # Log output to a specific file +# LOG_FILE="/home/ec2-user/django-indexer/logs/deploy.log" + +# echo -e "\n\n" >> "$LOG_FILE" +# echo "=========================================" >> "$LOG_FILE" +# echo "Running after_install.sh at $(date '+%Y-%m-%d %H:%M:%S')" >> "$LOG_FILE" +# echo "=========================================" >> "$LOG_FILE" + +# # Load env vars +# source /home/ec2-user/.bashrc + +# # Set correct ownership recursively for project directory +# sudo chown -R ec2-user:nginx /home/ec2-user/django-indexer/ +# echo "$(date '+%Y-%m-%d %H:%M:%S') - Corrected ownership to ec2-user:nginx" >> "$LOG_FILE" + +# # Set the necessary permissions +# sudo chmod -R 775 /home/ec2-user/django-indexer/ +# echo "$(date '+%Y-%m-%d %H:%M:%S') - Set permissions to 775" >> "$LOG_FILE" + +# # Restart nginx to apply any configuration changes +# sudo systemctl restart nginx +# echo "$(date '+%Y-%m-%d %H:%M:%S') - Restarted nginx" >> "$LOG_FILE" + +# # Define the project directory +# PROJECT_DIR="/home/ec2-user/django-indexer" + +# # Navigate to the project directory +# cd "$PROJECT_DIR" + +# # Source the specific poetry virtual environment +# # source "/home/ec2-user/.cache/pypoetry/virtualenvs/django-indexer-Y-SQFfhb-py3.11/bin/activate" + +# # Install dependencies using Poetry +# echo "$(date '+%Y-%m-%d %H:%M:%S') - Installing dependencies with Poetry" >> "$LOG_FILE" +# poetry install >> "$LOG_FILE" +# echo "$(date 
'+%Y-%m-%d %H:%M:%S') - Dependencies installed" >> "$LOG_FILE" + +# # Check if there are pending migrations and log the output +# echo "Checking for pending migrations..." >> "$LOG_FILE" +# PENDING_MIGRATIONS=$(poetry run python manage.py showmigrations | grep '\[ \]' 2>&1) # Redirect stderr to stdout +# echo "Migration check output: $PENDING_MIGRATIONS" >> "$LOG_FILE" + +# # Log the full output of showmigrations +# echo "Checking for pending migrations..." >> "$LOG_FILE" +# poetry run python manage.py showmigrations >> "$LOG_FILE" 2>&1 # Logging full output to diagnose + +# # Check for unapplied migrations +# PENDING_MIGRATIONS=$(poetry run python manage.py showmigrations | grep "\[ \]" | wc -l) # Count unapplied migrations + +# if [ "$PENDING_MIGRATIONS" -gt 0 ]; then +# echo "Migrations found; stopping services..." >> "$LOG_FILE" +# sudo systemctl stop gunicorn celery-indexer-worker celery-beat-worker celery-beat + +# echo 'Applying migrations...' >> "$LOG_FILE" +# poetry run python manage.py migrate >> "$LOG_FILE" 2>&1 + +# echo 'Starting services...' >> "$LOG_FILE" +# sudo systemctl start gunicorn celery-indexer-worker celery-beat-worker celery-beat +# else +# echo 'No migrations found. Running collectstatic and restarting services...' >> "$LOG_FILE" +# poetry run python manage.py collectstatic --noinput >> "$LOG_FILE" 2>&1 +# sudo systemctl restart gunicorn celery-indexer-worker celery-beat-worker celery-beat +# fi + +# echo "$(date '+%Y-%m-%d %H:%M:%S') - after_install.sh completed" >> "$LOG_FILE" From 7d83b98c0cd55bcbc731a12723cd036243aef088 Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Mon, 20 May 2024 09:37:15 -0400 Subject: [PATCH 090/127] revert after_install --- indexer_app/tasks.py | 2 +- scripts/after_install.sh | 141 ++++++++++++++++++++------------------- 2 files changed, 72 insertions(+), 71 deletions(-) diff --git a/indexer_app/tasks.py b/indexer_app/tasks.py index b5fa879..e0591f1 100644 --- a/indexer_app/tasks.py +++ b/indexer_app/tasks.py @@ -58,7 +58,7 @@ def listen_to_near_events(): try: # Update below with desired network & block height # start_block = get_block_height("current_block_height") - start_block = 111854642 + start_block = 109991746 logger.info(f"what's the start block, pray tell? 
{start_block-1}") loop.run_until_complete(indexer("mainnet", start_block - 1, None)) finally: diff --git a/scripts/after_install.sh b/scripts/after_install.sh index 4b921ba..6c56c5b 100755 --- a/scripts/after_install.sh +++ b/scripts/after_install.sh @@ -1,63 +1,3 @@ -#!/bin/bash -# Log output to a specific file -LOG_FILE="/home/ec2-user/django-indexer/logs/deploy.log" - -echo -e "\n\n" >> "$LOG_FILE" -echo "=========================================" >> "$LOG_FILE" -echo "Running after_install.sh at $(date '+%Y-%m-%d %H:%M:%S')" >> "$LOG_FILE" -echo "=========================================" >> "$LOG_FILE" - -# Load env vars -source /home/ec2-user/.bashrc - -# Set correct ownership recursively for project directory -sudo chown -R ec2-user:nginx /home/ec2-user/django-indexer/ -echo "$(date '+%Y-%m-%d %H:%M:%S') - Corrected ownership to ec2-user:nginx" >> "$LOG_FILE" - -# Set the necessary permissions -sudo chmod -R 775 /home/ec2-user/django-indexer/ -echo "$(date '+%Y-%m-%d %H:%M:%S') - Set permissions to 775" >> "$LOG_FILE" - -# Restart nginx to apply any configuration changes -sudo systemctl restart nginx -echo "$(date '+%Y-%m-%d %H:%M:%S') - Restarted nginx" >> "$LOG_FILE" - -# Define the project directory -PROJECT_DIR="/home/ec2-user/django-indexer" - -# Navigate to the project directory -cd "$PROJECT_DIR" - -# Install dependencies using Poetry -echo "$(date '+%Y-%m-%d %H:%M:%S') - Installing dependencies with Poetry" >> "$LOG_FILE" -poetry install >> "$LOG_FILE" -echo "$(date '+%Y-%m-%d %H:%M:%S') - Dependencies installed" >> "$LOG_FILE" - -# Log the full output of showmigrations to diagnose -echo "Checking for pending migrations..." >> "$LOG_FILE" -poetry run python manage.py showmigrations >> "$LOG_FILE" 2>&1 - -# Check for unapplied migrations -PENDING_MIGRATIONS=$(poetry run python manage.py showmigrations | grep "\[ \]" | wc -l) # Count unapplied migrations - -if [ "$PENDING_MIGRATIONS" -gt 0 ]; then - echo "Migrations found; stopping services..." >> "$LOG_FILE" - sudo systemctl stop gunicorn celery-indexer-worker celery-beat-worker celery-beat - - echo 'Applying migrations...' >> "$LOG_FILE" - poetry run python manage.py migrate >> "$LOG_FILE" 2>&1 - - echo 'Starting services...' >> "$LOG_FILE" - sudo systemctl start gunicorn celery-indexer-worker celery-beat-worker celery-beat -else - echo 'No migrations found. Running collectstatic and restarting services...' >> "$LOG_FILE" - poetry run python manage.py collectstatic --noinput >> "$LOG_FILE" 2>&1 - sudo systemctl restart gunicorn celery-indexer-worker celery-beat-worker celery-beat -fi - -echo "$(date '+%Y-%m-%d %H:%M:%S') - after_install.sh completed" >> "$LOG_FILE" - - # #!/bin/bash # # Log output to a specific file # LOG_FILE="/home/ec2-user/django-indexer/logs/deploy.log" @@ -88,22 +28,14 @@ echo "$(date '+%Y-%m-%d %H:%M:%S') - after_install.sh completed" >> "$LOG_FILE" # # Navigate to the project directory # cd "$PROJECT_DIR" -# # Source the specific poetry virtual environment -# # source "/home/ec2-user/.cache/pypoetry/virtualenvs/django-indexer-Y-SQFfhb-py3.11/bin/activate" - # # Install dependencies using Poetry # echo "$(date '+%Y-%m-%d %H:%M:%S') - Installing dependencies with Poetry" >> "$LOG_FILE" # poetry install >> "$LOG_FILE" # echo "$(date '+%Y-%m-%d %H:%M:%S') - Dependencies installed" >> "$LOG_FILE" -# # Check if there are pending migrations and log the output +# # Log the full output of showmigrations to diagnose # echo "Checking for pending migrations..." 
>> "$LOG_FILE" -# PENDING_MIGRATIONS=$(poetry run python manage.py showmigrations | grep '\[ \]' 2>&1) # Redirect stderr to stdout -# echo "Migration check output: $PENDING_MIGRATIONS" >> "$LOG_FILE" - -# # Log the full output of showmigrations -# echo "Checking for pending migrations..." >> "$LOG_FILE" -# poetry run python manage.py showmigrations >> "$LOG_FILE" 2>&1 # Logging full output to diagnose +# poetry run python manage.py showmigrations >> "$LOG_FILE" 2>&1 # # Check for unapplied migrations # PENDING_MIGRATIONS=$(poetry run python manage.py showmigrations | grep "\[ \]" | wc -l) # Count unapplied migrations @@ -124,3 +56,72 @@ echo "$(date '+%Y-%m-%d %H:%M:%S') - after_install.sh completed" >> "$LOG_FILE" # fi # echo "$(date '+%Y-%m-%d %H:%M:%S') - after_install.sh completed" >> "$LOG_FILE" + + + +#!/bin/bash +# Log output to a specific file +LOG_FILE="/home/ec2-user/django-indexer/logs/deploy.log" + +echo -e "\n\n" >> "$LOG_FILE" +echo "=========================================" >> "$LOG_FILE" +echo "Running after_install.sh at $(date '+%Y-%m-%d %H:%M:%S')" >> "$LOG_FILE" +echo "=========================================" >> "$LOG_FILE" + +# Load env vars +source /home/ec2-user/.bashrc + +# Set correct ownership recursively for project directory +sudo chown -R ec2-user:nginx /home/ec2-user/django-indexer/ +echo "$(date '+%Y-%m-%d %H:%M:%S') - Corrected ownership to ec2-user:nginx" >> "$LOG_FILE" + +# Set the necessary permissions +sudo chmod -R 775 /home/ec2-user/django-indexer/ +echo "$(date '+%Y-%m-%d %H:%M:%S') - Set permissions to 775" >> "$LOG_FILE" + +# Restart nginx to apply any configuration changes +sudo systemctl restart nginx +echo "$(date '+%Y-%m-%d %H:%M:%S') - Restarted nginx" >> "$LOG_FILE" + +# Define the project directory +PROJECT_DIR="/home/ec2-user/django-indexer" + +# Navigate to the project directory +cd "$PROJECT_DIR" + +# Source the specific poetry virtual environment +source "/home/ec2-user/.cache/pypoetry/virtualenvs/django-indexer-Y-SQFfhb-py3.11/bin/activate" + +# Install dependencies using Poetry +echo "$(date '+%Y-%m-%d %H:%M:%S') - Installing dependencies with Poetry" >> "$LOG_FILE" +poetry install >> "$LOG_FILE" +echo "$(date '+%Y-%m-%d %H:%M:%S') - Dependencies installed" >> "$LOG_FILE" + +# Check if there are pending migrations and log the output +echo "Checking for pending migrations..." >> "$LOG_FILE" +PENDING_MIGRATIONS=$(poetry run python manage.py showmigrations | grep '\[ \]' 2>&1) # Redirect stderr to stdout +echo "Migration check output: $PENDING_MIGRATIONS" >> "$LOG_FILE" + +# Log the full output of showmigrations +echo "Checking for pending migrations..." >> "$LOG_FILE" +poetry run python manage.py showmigrations >> "$LOG_FILE" 2>&1 # Logging full output to diagnose + +# Check for unapplied migrations +PENDING_MIGRATIONS=$(poetry run python manage.py showmigrations | grep "\[ \]" | wc -l) # Count unapplied migrations + +if [ "$PENDING_MIGRATIONS" -gt 0 ]; then + echo "Migrations found; stopping services..." >> "$LOG_FILE" + sudo systemctl stop gunicorn celery-indexer-worker celery-beat-worker celery-beat + + echo 'Applying migrations...' >> "$LOG_FILE" + poetry run python manage.py migrate >> "$LOG_FILE" 2>&1 + + echo 'Starting services...' >> "$LOG_FILE" + sudo systemctl start gunicorn celery-indexer-worker celery-beat-worker celery-beat +else + echo 'No migrations found. Running collectstatic and restarting services...' 
>> "$LOG_FILE" + poetry run python manage.py collectstatic --noinput >> "$LOG_FILE" 2>&1 + sudo systemctl restart gunicorn celery-indexer-worker celery-beat-worker celery-beat +fi + +echo "$(date '+%Y-%m-%d %H:%M:%S') - after_install.sh completed" >> "$LOG_FILE" From 7b8f79affca335c5c7de2ae38dc532242d8f0f82 Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Mon, 20 May 2024 09:42:51 -0400 Subject: [PATCH 091/127] update start block to 110046700 --- indexer_app/tasks.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/indexer_app/tasks.py b/indexer_app/tasks.py index e0591f1..b35c384 100644 --- a/indexer_app/tasks.py +++ b/indexer_app/tasks.py @@ -58,7 +58,7 @@ def listen_to_near_events(): try: # Update below with desired network & block height # start_block = get_block_height("current_block_height") - start_block = 109991746 + start_block = 110046700 logger.info(f"what's the start block, pray tell? {start_block-1}") loop.run_until_complete(indexer("mainnet", start_block - 1, None)) finally: From b8f87aaa3d62f1b86aa3a45376601d5fdc19be9e Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Mon, 20 May 2024 09:55:26 -0400 Subject: [PATCH 092/127] avoid logging WorkerLostErrors to Sentry --- indexer_app/tasks.py | 17 +++++++++++++++-- 1 file changed, 15 insertions(+), 2 deletions(-) diff --git a/indexer_app/tasks.py b/indexer_app/tasks.py index b35c384..84bb319 100644 --- a/indexer_app/tasks.py +++ b/indexer_app/tasks.py @@ -2,8 +2,9 @@ import logging from pathlib import Path +from billiard.exceptions import WorkerLostError from celery import shared_task -from celery.signals import task_revoked +from celery.signals import task_revoked, worker_shutdown from django.conf import settings from django.db import transaction from django.db.models import Count, Q, Sum @@ -58,13 +59,25 @@ def listen_to_near_events(): try: # Update below with desired network & block height # start_block = get_block_height("current_block_height") - start_block = 110046700 + start_block = 111932487 logger.info(f"what's the start block, pray tell? {start_block-1}") loop.run_until_complete(indexer("mainnet", start_block - 1, None)) + except WorkerLostError: + pass # don't log to Sentry finally: loop.close() +@worker_shutdown.connect +def worker_shutdown_handler(sig, how, exitcode, **kwargs): + if sig == 15: + logger.info( + "Celery worker shutdown initiated by signal 15 (SIGTERM)." + ) # avoid logging to Sentry + else: + logger.error("Celery worker shutdown due to signal %d.", sig) + + jobs_logger = logging.getLogger("jobs") From fa5c4bc011acbe595483b025a8d798a3a93d0ba8 Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Mon, 20 May 2024 10:00:20 -0400 Subject: [PATCH 093/127] update start block to 111941200 --- indexer_app/tasks.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/indexer_app/tasks.py b/indexer_app/tasks.py index 84bb319..c6bd07e 100644 --- a/indexer_app/tasks.py +++ b/indexer_app/tasks.py @@ -59,7 +59,7 @@ def listen_to_near_events(): try: # Update below with desired network & block height # start_block = get_block_height("current_block_height") - start_block = 111932487 + start_block = 111941200 logger.info(f"what's the start block, pray tell? 
{start_block-1}") loop.run_until_complete(indexer("mainnet", start_block - 1, None)) except WorkerLostError: From e83dda82de214fb705a8fb1e77433344777d78fe Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Mon, 20 May 2024 10:10:42 -0400 Subject: [PATCH 094/127] update start block to 112034320 --- indexer_app/tasks.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/indexer_app/tasks.py b/indexer_app/tasks.py index c6bd07e..595adb9 100644 --- a/indexer_app/tasks.py +++ b/indexer_app/tasks.py @@ -59,7 +59,7 @@ def listen_to_near_events(): try: # Update below with desired network & block height # start_block = get_block_height("current_block_height") - start_block = 111941200 + start_block = 112034320 logger.info(f"what's the start block, pray tell? {start_block-1}") loop.run_until_complete(indexer("mainnet", start_block - 1, None)) except WorkerLostError: From f61cc0debcb35d3df4dd3e0376898441ad7c42c8 Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Mon, 20 May 2024 10:14:58 -0400 Subject: [PATCH 095/127] update start block to 112158850 --- indexer_app/tasks.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/indexer_app/tasks.py b/indexer_app/tasks.py index 595adb9..5ed211e 100644 --- a/indexer_app/tasks.py +++ b/indexer_app/tasks.py @@ -59,7 +59,7 @@ def listen_to_near_events(): try: # Update below with desired network & block height # start_block = get_block_height("current_block_height") - start_block = 112034320 + start_block = 112158850 logger.info(f"what's the start block, pray tell? {start_block-1}") loop.run_until_complete(indexer("mainnet", start_block - 1, None)) except WorkerLostError: From eaf0cf32a03824944ff62bb265de953162cc8f6b Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Mon, 20 May 2024 10:29:46 -0400 Subject: [PATCH 096/127] update start block to 112461673 --- indexer_app/tasks.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/indexer_app/tasks.py b/indexer_app/tasks.py index 5ed211e..75e30da 100644 --- a/indexer_app/tasks.py +++ b/indexer_app/tasks.py @@ -59,7 +59,7 @@ def listen_to_near_events(): try: # Update below with desired network & block height # start_block = get_block_height("current_block_height") - start_block = 112158850 + start_block = 112461673 logger.info(f"what's the start block, pray tell? 
{start_block-1}") loop.run_until_complete(indexer("mainnet", start_block - 1, None)) except WorkerLostError: From c2cc3674d306ce7d5034510040b30a0d73ad45ce Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Mon, 20 May 2024 13:15:57 -0400 Subject: [PATCH 097/127] fix display of admins & wl deployers in PotFactory detail (admin) --- pots/admin.py | 69 ++++++++++++++++++++++++++++++++++++++------------- 1 file changed, 52 insertions(+), 17 deletions(-) diff --git a/pots/admin.py b/pots/admin.py index d49ee44..03cdba9 100644 --- a/pots/admin.py +++ b/pots/admin.py @@ -1,5 +1,8 @@ +from django import forms from django.contrib import admin +from accounts.models import Account + from .models import ( Pot, PotApplication, @@ -10,37 +13,69 @@ ) +class PotFactoryForm(forms.ModelForm): + class Meta: + model = PotFactory + fields = "__all__" + + def __init__(self, *args, **kwargs): + super(PotFactoryForm, self).__init__(*args, **kwargs) + # Ensure self.instance is available before accessing it + if self.instance.pk: + # Set the queryset for the admins field to only include relevant accounts + self.fields["admins"].queryset = self.instance.admins.all() + # Set the queryset for the whitelisted_deployers field to only include relevant accounts + self.fields["whitelisted_deployers"].queryset = ( + self.instance.whitelisted_deployers.all() + ) + + @admin.register(PotFactory) class PotFactoryAdmin(admin.ModelAdmin): - list_display = ('id', 'owner', 'deployed_at') - search_fields = ('id', 'owner__id') + form = PotFactoryForm + list_display = ("id", "owner", "deployed_at") + search_fields = ("id", "owner__id") + + def get_form(self, request, obj=None, **kwargs): + form = super(PotFactoryAdmin, self).get_form(request, obj, **kwargs) + if obj: + form.base_fields["admins"].queryset = obj.admins.all() + form.base_fields["whitelisted_deployers"].queryset = ( + obj.whitelisted_deployers.all() + ) + return form + @admin.register(Pot) class PotAdmin(admin.ModelAdmin): - list_display = ('id', 'pot_factory', 'deployer', 'deployed_at', 'name') - search_fields = ('id', 'name', 'deployer__id') - list_filter = ('deployed_at',) + list_display = ("id", "pot_factory", "deployer", "deployed_at", "name") + search_fields = ("id", "name", "deployer__id") + list_filter = ("deployed_at",) + @admin.register(PotApplication) class PotApplicationAdmin(admin.ModelAdmin): - list_display = ('id', 'pot', 'applicant', 'status', 'submitted_at') - search_fields = ('pot__id', 'applicant__id') - list_filter = ('status', 'submitted_at') + list_display = ("id", "pot", "applicant", "status", "submitted_at") + search_fields = ("pot__id", "applicant__id") + list_filter = ("status", "submitted_at") + @admin.register(PotPayout) class PotPayoutAdmin(admin.ModelAdmin): - list_display = ('id', 'pot', 'recipient', 'amount', 'paid_at') - search_fields = ('pot__id', 'recipient__id') - list_filter = ('paid_at',) + list_display = ("id", "pot", "recipient", "amount", "paid_at") + search_fields = ("pot__id", "recipient__id") + list_filter = ("paid_at",) + @admin.register(PotPayoutChallenge) class PotPayoutChallengeAdmin(admin.ModelAdmin): - list_display = ('id', 'challenger', 'pot', 'created_at') - search_fields = ('challenger__id', 'pot__id') - list_filter = ('created_at',) + list_display = ("id", "challenger", "pot", "created_at") + search_fields = ("challenger__id", "pot__id") + list_filter = ("created_at",) + @admin.register(PotPayoutChallengeAdminResponse) class 
PotPayoutChallengeAdminResponseAdmin(admin.ModelAdmin): - list_display = ('id', 'pot', 'admin', 'created_at', 'resolved') - search_fields = ('admin__id', 'challenge__id') - list_filter = ('created_at', 'resolved') + list_display = ("id", "pot", "admin", "created_at", "resolved") + search_fields = ("admin__id", "challenge__id") + list_filter = ("created_at", "resolved") From 6456274fb49efcc92e689d99cb40a458e8446dcf Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Mon, 20 May 2024 13:20:28 -0400 Subject: [PATCH 098/127] update verbose name plurals --- lists/models.py | 2 +- pots/models.py | 11 +++++++---- 2 files changed, 8 insertions(+), 5 deletions(-) diff --git a/lists/models.py b/lists/models.py index 7732f1e..ad412d5 100644 --- a/lists/models.py +++ b/lists/models.py @@ -102,7 +102,7 @@ class ListUpvote(models.Model): ) class Meta: - verbose_name_plural = "ListUpvotes" + verbose_name_plural = "List Upvotes" unique_together = (("list", "account"),) diff --git a/pots/models.py b/pots/models.py index b216713..a495aea 100644 --- a/pots/models.py +++ b/pots/models.py @@ -57,6 +57,9 @@ class PotFactory(models.Model): help_text=_("Require whitelist."), ) + class Meta: + verbose_name_plural = "Pot Factories" + class Pot(models.Model): id = models.OneToOneField( @@ -333,7 +336,7 @@ class PotApplication(models.Model): ) class Meta: - verbose_name_plural = "PotApplications" + verbose_name_plural = "Pot Applications" unique_together = (("pot", "applicant"),) @@ -384,7 +387,7 @@ class PotApplicationReview(models.Model): ) class Meta: - verbose_name_plural = "PotApplicationReviews" + verbose_name_plural = "Pot Application Reviews" unique_together = (("application", "reviewer", "reviewed_at"),) @@ -479,7 +482,7 @@ class PotPayoutChallenge(models.Model): ) class Meta: - verbose_name_plural = "PayoutChallenges" + verbose_name_plural = "Payout Challenges" unique_together = (("challenger", "pot"),) @@ -537,6 +540,6 @@ class PotPayoutChallengeAdminResponse(models.Model): ) class Meta: - verbose_name_plural = "PotPayoutChallengeResponses" + verbose_name_plural = "Payout Challenge Responses" unique_together = (("challenger", "pot", "created_at"),) From eeeb00e92002d7ccc35d6c5690be286140788e2f Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Mon, 20 May 2024 13:21:43 -0400 Subject: [PATCH 099/127] add Pot custom form --- pots/admin.py | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/pots/admin.py b/pots/admin.py index 03cdba9..c934392 100644 --- a/pots/admin.py +++ b/pots/admin.py @@ -46,12 +46,32 @@ def get_form(self, request, obj=None, **kwargs): return form +class PotForm(forms.ModelForm): + class Meta: + model = Pot + fields = "__all__" + + def __init__(self, *args, **kwargs): + super(PotForm, self).__init__(*args, **kwargs) + # Ensure self.instance is available before accessing it + if self.instance.pk: + # Set the queryset for the admins field to only include relevant accounts + self.fields["admins"].queryset = self.instance.admins.all() + + @admin.register(Pot) class PotAdmin(admin.ModelAdmin): + form = PotForm list_display = ("id", "pot_factory", "deployer", "deployed_at", "name") search_fields = ("id", "name", "deployer__id") list_filter = ("deployed_at",) + def get_form(self, request, obj=None, **kwargs): + form = super(PotAdmin, self).get_form(request, obj, **kwargs) + if obj: + form.base_fields["admins"].queryset = obj.admins.all() + return form + @admin.register(PotApplication) 
class PotApplicationAdmin(admin.ModelAdmin): From 898a512dd85ea572621fcec7357140da126670a9 Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Mon, 20 May 2024 14:59:24 -0400 Subject: [PATCH 100/127] add admin for PotApplicationReview --- pots/admin.py | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/pots/admin.py b/pots/admin.py index c934392..09e40b3 100644 --- a/pots/admin.py +++ b/pots/admin.py @@ -6,6 +6,7 @@ from .models import ( Pot, PotApplication, + PotApplicationReview, PotFactory, PotPayout, PotPayoutChallenge, @@ -80,6 +81,21 @@ class PotApplicationAdmin(admin.ModelAdmin): list_filter = ("status", "submitted_at") +@admin.register(PotApplicationReview) +class PotApplicationReviewAdmin(admin.ModelAdmin): + list_display = ( + "id", + "application", + "reviewer", + "notes", + "status", + "reviewed_at", + "tx_hash", + ) + search_fields = ("application__id", "reviewer__id") + list_filter = ("status", "reviewed_at") + + @admin.register(PotPayout) class PotPayoutAdmin(admin.ModelAdmin): list_display = ("id", "pot", "recipient", "amount", "paid_at") From 16e046842de5b3e98774a882c8c74dc5c4fed5e5 Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Mon, 20 May 2024 15:06:11 -0400 Subject: [PATCH 101/127] comment out worker_shutdown_handler --- indexer_app/tasks.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/indexer_app/tasks.py b/indexer_app/tasks.py index 75e30da..90056d0 100644 --- a/indexer_app/tasks.py +++ b/indexer_app/tasks.py @@ -68,14 +68,14 @@ def listen_to_near_events(): loop.close() -@worker_shutdown.connect -def worker_shutdown_handler(sig, how, exitcode, **kwargs): - if sig == 15: - logger.info( - "Celery worker shutdown initiated by signal 15 (SIGTERM)." - ) # avoid logging to Sentry - else: - logger.error("Celery worker shutdown due to signal %d.", sig) +# @worker_shutdown.connect +# def worker_shutdown_handler(sig, how, exitcode, **kwargs): +# if sig == 15: +# logger.info( +# "Celery worker shutdown initiated by signal 15 (SIGTERM)." +# ) # avoid logging to Sentry +# else: +# logger.error("Celery worker shutdown due to signal %d.", sig) jobs_logger = logging.getLogger("jobs") From fb4614fb7dc61d12b541940265c493f1389a2981 Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Tue, 21 May 2024 15:07:23 -0400 Subject: [PATCH 102/127] fix tuple error when creating pot, update start block to 112961757 to reindex --- indexer_app/tasks.py | 2 +- indexer_app/utils.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/indexer_app/tasks.py b/indexer_app/tasks.py index 90056d0..706ba85 100644 --- a/indexer_app/tasks.py +++ b/indexer_app/tasks.py @@ -59,7 +59,7 @@ def listen_to_near_events(): try: # Update below with desired network & block height # start_block = get_block_height("current_block_height") - start_block = 112461673 + start_block = 112961757 logger.info(f"what's the start block, pray tell? 
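The handler commented out in patch 101 connects to worker_shutdown, but it is celery's worker_shutting_down signal that is documented to receive sig, howto, and exitcode, which would explain the handler never seeing the arguments it expected. A hedged sketch on that signal, reusing the module's existing logger:

    from celery.signals import worker_shutting_down

    @worker_shutting_down.connect
    def on_worker_shutting_down(sig=None, howto=None, exitcode=None, **kwargs):
        # Log whatever the signal delivers; keyword defaults keep this safe
        # if celery dispatches with a different argument set.
        logger.info("Worker shutting down: sig=%s how=%s exitcode=%s", sig, howto, exitcode)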
{start_block-1}") loop.run_until_complete(indexer("mainnet", start_block - 1, None)) except WorkerLostError: diff --git a/indexer_app/utils.py b/indexer_app/utils.py index e3fd803..9e05d25 100644 --- a/indexer_app/utils.py +++ b/indexer_app/utils.py @@ -98,7 +98,7 @@ async def handle_new_pot( "all_paid_out": False, "protocol_config_provider": data["protocol_config_provider"], } - potObject = await Pot.objects.aupdate_or_create( + pot, created = await Pot.objects.aupdate_or_create( id=receiver, defaults=pot_defaults ) @@ -106,7 +106,7 @@ async def handle_new_pot( if data.get("admins"): for admin_id in data["admins"]: admin, _ = await Account.objects.aget_or_create(id=admin_id) - potObject.admins.aadd(admin) + pot.admins.aadd(admin) defaults = { "signer_id": signerId, From e160c50b26c519afb6646e766254fa741a60b120 Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Wed, 22 May 2024 10:55:55 -0400 Subject: [PATCH 103/127] update start block to 112920268 --- indexer_app/tasks.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/indexer_app/tasks.py b/indexer_app/tasks.py index 706ba85..9100cb6 100644 --- a/indexer_app/tasks.py +++ b/indexer_app/tasks.py @@ -59,7 +59,7 @@ def listen_to_near_events(): try: # Update below with desired network & block height # start_block = get_block_height("current_block_height") - start_block = 112961757 + start_block = 112920268 logger.info(f"what's the start block, pray tell? {start_block-1}") loop.run_until_complete(indexer("mainnet", start_block - 1, None)) except WorkerLostError: From cb8ed9be23bf76eead20f80cf30511846a033b14 Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Wed, 22 May 2024 11:23:09 -0400 Subject: [PATCH 104/127] update start block to 113030298 --- indexer_app/tasks.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/indexer_app/tasks.py b/indexer_app/tasks.py index 9100cb6..ac41d84 100644 --- a/indexer_app/tasks.py +++ b/indexer_app/tasks.py @@ -59,7 +59,7 @@ def listen_to_near_events(): try: # Update below with desired network & block height # start_block = get_block_height("current_block_height") - start_block = 112920268 + start_block = 113030298 logger.info(f"what's the start block, pray tell? {start_block-1}") loop.run_until_complete(indexer("mainnet", start_block - 1, None)) except WorkerLostError: From 3e4678bd1fa03889b8e3e778f1ff56c97f1e8adc Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Wed, 22 May 2024 12:02:17 -0400 Subject: [PATCH 105/127] update start block to 113042335 --- indexer_app/tasks.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/indexer_app/tasks.py b/indexer_app/tasks.py index ac41d84..654cd9f 100644 --- a/indexer_app/tasks.py +++ b/indexer_app/tasks.py @@ -59,7 +59,7 @@ def listen_to_near_events(): try: # Update below with desired network & block height # start_block = get_block_height("current_block_height") - start_block = 113030298 + start_block = 113042335 logger.info(f"what's the start block, pray tell? 
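Patch 102's fix unpacks the (instance, created) tuple that aupdate_or_create returns instead of treating the tuple as the model instance. Note that the related-manager call on the following line is still not awaited; aadd() is a coroutine in async Django, so unawaited it never executes. A sketch of the fully-async version, with names as in utils.py above:

    pot, created = await Pot.objects.aupdate_or_create(id=receiver, defaults=pot_defaults)
    if data.get("admins"):
        for admin_id in data["admins"]:
            admin, _ = await Account.objects.aget_or_create(id=admin_id)
            await pot.admins.aadd(admin)  # without await, the add never runs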
{start_block-1}") loop.run_until_complete(indexer("mainnet", start_block - 1, None)) except WorkerLostError: From fcd90b8554c90f38c7b4f6a7a2f995b17c7f0b43 Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Wed, 22 May 2024 12:57:48 -0400 Subject: [PATCH 106/127] update start block to 113058915 --- indexer_app/tasks.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/indexer_app/tasks.py b/indexer_app/tasks.py index 654cd9f..1c2f7d2 100644 --- a/indexer_app/tasks.py +++ b/indexer_app/tasks.py @@ -59,7 +59,7 @@ def listen_to_near_events(): try: # Update below with desired network & block height # start_block = get_block_height("current_block_height") - start_block = 113042335 + start_block = 113058915 logger.info(f"what's the start block, pray tell? {start_block-1}") loop.run_until_complete(indexer("mainnet", start_block - 1, None)) except WorkerLostError: From 12e0db412b125d715edca5a7f62346ec164d6228 Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Wed, 22 May 2024 13:02:06 -0400 Subject: [PATCH 107/127] update start block to 113066329 --- indexer_app/tasks.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/indexer_app/tasks.py b/indexer_app/tasks.py index 1c2f7d2..b99358a 100644 --- a/indexer_app/tasks.py +++ b/indexer_app/tasks.py @@ -59,7 +59,7 @@ def listen_to_near_events(): try: # Update below with desired network & block height # start_block = get_block_height("current_block_height") - start_block = 113058915 + start_block = 113066329 logger.info(f"what's the start block, pray tell? {start_block-1}") loop.run_until_complete(indexer("mainnet", start_block - 1, None)) except WorkerLostError: From 6307a8905c8188c774d520752cfe92e80afafc71 Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Wed, 22 May 2024 13:11:24 -0400 Subject: [PATCH 108/127] update start block to 113155075 --- indexer_app/tasks.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/indexer_app/tasks.py b/indexer_app/tasks.py index b99358a..0af92e7 100644 --- a/indexer_app/tasks.py +++ b/indexer_app/tasks.py @@ -59,7 +59,7 @@ def listen_to_near_events(): try: # Update below with desired network & block height # start_block = get_block_height("current_block_height") - start_block = 113066329 + start_block = 113155075 logger.info(f"what's the start block, pray tell? {start_block-1}") loop.run_until_complete(indexer("mainnet", start_block - 1, None)) except WorkerLostError: From 19e1916b187388cb7b73f919d8e6daf5b7f5188a Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Wed, 22 May 2024 13:21:48 -0400 Subject: [PATCH 109/127] update start block to 113284717 --- indexer_app/tasks.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/indexer_app/tasks.py b/indexer_app/tasks.py index 0af92e7..e78ac36 100644 --- a/indexer_app/tasks.py +++ b/indexer_app/tasks.py @@ -59,7 +59,7 @@ def listen_to_near_events(): try: # Update below with desired network & block height # start_block = get_block_height("current_block_height") - start_block = 113155075 + start_block = 113284717 logger.info(f"what's the start block, pray tell? 
{start_block-1}") loop.run_until_complete(indexer("mainnet", start_block - 1, None)) except WorkerLostError: From 93db4c074b3e3583a72ec53cb21bee8bf58d3b6c Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Wed, 22 May 2024 13:25:49 -0400 Subject: [PATCH 110/127] update start block to 113455493 --- indexer_app/tasks.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/indexer_app/tasks.py b/indexer_app/tasks.py index e78ac36..f36e89e 100644 --- a/indexer_app/tasks.py +++ b/indexer_app/tasks.py @@ -59,7 +59,7 @@ def listen_to_near_events(): try: # Update below with desired network & block height # start_block = get_block_height("current_block_height") - start_block = 113284717 + start_block = 113455493 logger.info(f"what's the start block, pray tell? {start_block-1}") loop.run_until_complete(indexer("mainnet", start_block - 1, None)) except WorkerLostError: From ee1827dfc61478c75c6917bb76f715b2017678f7 Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Wed, 22 May 2024 13:33:21 -0400 Subject: [PATCH 111/127] update start block to 113613466 --- indexer_app/tasks.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/indexer_app/tasks.py b/indexer_app/tasks.py index f36e89e..4a39ad1 100644 --- a/indexer_app/tasks.py +++ b/indexer_app/tasks.py @@ -59,7 +59,7 @@ def listen_to_near_events(): try: # Update below with desired network & block height # start_block = get_block_height("current_block_height") - start_block = 113455493 + start_block = 113613466 logger.info(f"what's the start block, pray tell? {start_block-1}") loop.run_until_complete(indexer("mainnet", start_block - 1, None)) except WorkerLostError: From ff92358c35392b1512e993a7d5475382f75ad1ce Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Wed, 22 May 2024 13:37:45 -0400 Subject: [PATCH 112/127] update start block to 113614770 --- indexer_app/tasks.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/indexer_app/tasks.py b/indexer_app/tasks.py index 4a39ad1..abdd121 100644 --- a/indexer_app/tasks.py +++ b/indexer_app/tasks.py @@ -59,7 +59,7 @@ def listen_to_near_events(): try: # Update below with desired network & block height # start_block = get_block_height("current_block_height") - start_block = 113613466 + start_block = 113614770 logger.info(f"what's the start block, pray tell? {start_block-1}") loop.run_until_complete(indexer("mainnet", start_block - 1, None)) except WorkerLostError: From fc024b94ac1c372007a9458b637fb5d46104d34e Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Wed, 22 May 2024 13:43:42 -0400 Subject: [PATCH 113/127] update start block to 114027103 --- indexer_app/tasks.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/indexer_app/tasks.py b/indexer_app/tasks.py index abdd121..626d640 100644 --- a/indexer_app/tasks.py +++ b/indexer_app/tasks.py @@ -59,7 +59,7 @@ def listen_to_near_events(): try: # Update below with desired network & block height # start_block = get_block_height("current_block_height") - start_block = 113614770 + start_block = 114027103 logger.info(f"what's the start block, pray tell? 
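Patches 091 through 115 keep bumping a hard-coded start_block, while the commented-out get_block_height("current_block_height") line points at the alternative patch 118 eventually adopts: resuming from a persisted height. The helper names below match the tasks.py imports; their cache-backed bodies are assumptions (the repo's BlockHeight model could back them instead):

    from django.core.cache import cache

    def cache_block_height(key: str, height: int) -> None:
        cache.set(key, height, timeout=None)  # timeout=None: keep until overwritten

    def get_block_height(key: str) -> int | None:
        return cache.get(key)

With that in place, listen_to_near_events() only needs a fixed height as a fallback when the cache is empty.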
{start_block-1}") loop.run_until_complete(indexer("mainnet", start_block - 1, None)) except WorkerLostError: From a7f097a888a167791c5b0f3214c935bbfd9d3cce Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Wed, 22 May 2024 13:58:45 -0400 Subject: [PATCH 114/127] update start block to 114037338 --- indexer_app/tasks.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/indexer_app/tasks.py b/indexer_app/tasks.py index 626d640..2059b41 100644 --- a/indexer_app/tasks.py +++ b/indexer_app/tasks.py @@ -59,7 +59,7 @@ def listen_to_near_events(): try: # Update below with desired network & block height # start_block = get_block_height("current_block_height") - start_block = 114027103 + start_block = 114037338 logger.info(f"what's the start block, pray tell? {start_block-1}") loop.run_until_complete(indexer("mainnet", start_block - 1, None)) except WorkerLostError: From c8275976b905a1621074774a09e38287298cd572 Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Wed, 22 May 2024 14:08:56 -0400 Subject: [PATCH 115/127] update start block to 114056625 --- indexer_app/tasks.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/indexer_app/tasks.py b/indexer_app/tasks.py index 2059b41..db6cdbb 100644 --- a/indexer_app/tasks.py +++ b/indexer_app/tasks.py @@ -59,7 +59,7 @@ def listen_to_near_events(): try: # Update below with desired network & block height # start_block = get_block_height("current_block_height") - start_block = 114037338 + start_block = 114056625 logger.info(f"what's the start block, pray tell? {start_block-1}") loop.run_until_complete(indexer("mainnet", start_block - 1, None)) except WorkerLostError: From ba8c3e9117a4046745dd6196f262936f10fe4231 Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Thu, 23 May 2024 14:20:30 -0400 Subject: [PATCH 116/127] handle None pot application & update start block --- indexer_app/handler.py | 2 +- indexer_app/tasks.py | 2 +- indexer_app/utils.py | 32 +++++++++++++++++++------------- 3 files changed, 21 insertions(+), 15 deletions(-) diff --git a/indexer_app/handler.py b/indexer_app/handler.py index c0c67af..9d86fbf 100644 --- a/indexer_app/handler.py +++ b/indexer_app/handler.py @@ -147,7 +147,7 @@ async def handle_streamer_message(streamer_message: near_primitives.StreamerMess created_at, ) break - + # TODO: update to use handle_apply method?? case "assert_can_apply_callback": logger.info( f"application case: {args_dict}, {action}, {receipt}" diff --git a/indexer_app/tasks.py b/indexer_app/tasks.py index db6cdbb..99ea51a 100644 --- a/indexer_app/tasks.py +++ b/indexer_app/tasks.py @@ -59,7 +59,7 @@ def listen_to_near_events(): try: # Update below with desired network & block height # start_block = get_block_height("current_block_height") - start_block = 114056625 + start_block = 112952680 logger.info(f"what's the start block, pray tell? 
{start_block-1}") loop.run_until_complete(indexer("mainnet", start_block - 1, None)) except WorkerLostError: diff --git a/indexer_app/utils.py b/indexer_app/utils.py index 9e05d25..7e44c2f 100644 --- a/indexer_app/utils.py +++ b/indexer_app/utils.py @@ -385,7 +385,13 @@ async def handle_pot_application_status_change( # Retrieve the PotApplication object appl = await PotApplication.objects.filter( applicant_id=data["project_id"] - ).afirst() # TODO: handle this being None + ).afirst() + + if not appl: + logger.error( + f"PotApplication object not found for project_id: {data['project_id']}" + ) + return # Create the PotApplicationReview object logger.info(f"create review...... {appl}") @@ -411,7 +417,7 @@ async def handle_pot_application_status_change( logger.info("PotApplicationReview and PotApplication updated successfully.") except Exception as e: - logger.warning(f"Failed to change pot application status, Error: {e}") + logger.error(f"Failed to change pot application status, Error: {e}") async def handle_default_list_status_change( @@ -442,7 +448,7 @@ async def handle_default_list_status_change( logger.info("List updated successfully.") except Exception as e: - logger.warning(f"Failed to change list status, Error: {e}") + logger.error(f"Failed to change list status, Error: {e}") async def handle_list_upvote( @@ -478,7 +484,7 @@ async def handle_list_upvote( f"Upvote and activity records created successfully. {activity_created}" ) except Exception as e: - logger.warning(f"Failed to upvote list, Error: {e}") + logger.error(f"Failed to upvote list, Error: {e}") async def handle_set_payouts(data: dict, receiverId: str, receipt: Receipt): @@ -500,7 +506,7 @@ async def handle_set_payouts(data: dict, receiverId: str, receipt: Receipt): await PotPayout.objects.abulk_create(insertion_data, ignore_conflicts=True) except Exception as e: - logger.warning(f"Failed to set payouts, Error: {e}") + logger.error(f"Failed to set payouts, Error: {e}") async def handle_transfer_payout( @@ -520,7 +526,7 @@ async def handle_transfer_payout( **payout ) except Exception as e: - logger.warning(f"Failed to create payout data, Error: {e}") + logger.error(f"Failed to create payout data, Error: {e}") async def handle_payout_challenge( @@ -549,7 +555,7 @@ async def handle_payout_challenge( action_result=payoutChallenge, type="Challenge_Payout", defaults=defaults ) except Exception as e: - logger.warning(f"Failed to create payoutchallenge, Error: {e}") + logger.error(f"Failed to create payoutchallenge, Error: {e}") async def handle_payout_challenge_response( @@ -593,7 +599,7 @@ async def handle_list_admin_removal(data, receiverId, signerId, receiptId): type="Remove_List_Admin", defaults=activity ) except Exception as e: - logger.warning(f"Failed to remove list admin, Error: {e}") + logger.error(f"Failed to remove list admin, Error: {e}") # TODO: Need to abstract some actions. 
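The guard added in patch 116 handles afirst() returning None. One further observation: PotApplication is unique per (pot, applicant) pair (see the unique_together in pots/models.py above), so filtering on the applicant alone can match an application from a different pot. A hedged variant that also scopes by pot, treating receiverId as the pot account id (an assumption):

    appl = await PotApplication.objects.filter(
        pot_id=receiverId, applicant_id=data["project_id"]
    ).afirst()
    if not appl:
        logger.error(
            f"PotApplication not found for pot {receiverId}, project {data['project_id']}"
        )
        return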
@@ -685,7 +691,7 @@ async def handle_new_donations( id=(donation_data.get("ft_id") or "near") ) except Exception as e: - logger.warning(f"Failed to create/get an account involved in donation: {e}") + logger.error(f"Failed to create/get an account involved in donation: {e}") # # Upsert token # try: @@ -704,10 +710,10 @@ async def handle_new_donations( # response = requests.get(endpoint) # logger.info(f"response: {response}") # if response.status_code == 429: - # logger.warning("Coingecko rate limit exceeded") + # logger.error("Coingecko rate limit exceeded") # price_data = response.json() # except Exception as e: - # logger.warning(f"Failed to fetch price data: {e}") + # logger.error(f"Failed to fetch price data: {e}") # logger.info(f"price data: {price_data}") # unit_price = price_data.get("market_data", {}).get("current_price", {}).get("usd") # logger.info(f"unit price: {unit_price}") @@ -719,7 +725,7 @@ async def handle_new_donations( # timestamp=donated_at, # ) # except Exception as e: - # logger.warning( + # logger.error( # f"Error creating TokenHistoricalPrice: {e} token: {token} unit_price: {unit_price}" # ) # # historical_price = await token.get_most_recent_price() # to use model methods, we might have to use asgiref sync_to_async @@ -806,7 +812,7 @@ async def handle_new_donations( except Exception as e: logger.info(f"Failed to create Activity: {e}") except Exception as e: - logger.warning(f"Failed to create/update donation: {e}") + logger.error(f"Failed to create/update donation: {e}") ### COMMENTING OUT FOR NOW SINCE WE HAVE PERIODIC JOB RUNNING TO UPDATE ACCOUNT STATS (NB: DOESN'T CURRENTLY COVER POT STATS) ### CAN ALWAYS ADD BACK IF DESIRED From 48b20ec87910ec5b1e0499cc05edd2a79b7f91c5 Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Thu, 23 May 2024 16:41:38 -0400 Subject: [PATCH 117/127] use coingecko pro api --- base/settings.py | 7 ++++++- donations/models.py | 2 ++ indexer_app/tasks.py | 2 +- 3 files changed, 9 insertions(+), 2 deletions(-) diff --git a/base/settings.py b/base/settings.py index 698936e..ce99c04 100644 --- a/base/settings.py +++ b/base/settings.py @@ -37,6 +37,7 @@ AWS_SECRET_ACCESS_KEY = os.environ.get("PL_AWS_SECRET_ACCESS_KEY") # CACHALOT_ENABLED = strtobool(os.environ.get("PL_CACHALOT_ENABLED", "False")) # CACHALOT_TIMEOUT = os.environ.get("PL_CACHALOT_TIMEOUT") +COINGECKO_API_KEY = os.environ.get("PL_COINGECKO_API_KEY") DEBUG = strtobool(os.environ.get("PL_DEBUG", "False")) ENVIRONMENT = os.environ.get("PL_ENVIRONMENT", "local") LOG_LEVEL = os.getenv("PL_LOG_LEVEL", "INFO").upper() @@ -53,7 +54,11 @@ BLOCK_SAVE_HEIGHT = os.environ.get("BLOCK_SAVE_HEIGHT") -COINGECKO_URL = "https://api.coingecko.com/api/v3" +COINGECKO_URL = ( + "https://pro-api.coingecko.com/api/v3" + if COINGECKO_API_KEY + else "https://api.coingecko.com/api/v3" +) # Number of hours around a given timestamp for querying historical prices HISTORICAL_PRICE_QUERY_HOURS = 24 diff --git a/donations/models.py b/donations/models.py index bc671cc..b101250 100644 --- a/donations/models.py +++ b/donations/models.py @@ -241,6 +241,8 @@ def fetch_usd_prices(self): "No existing price within acceptable time period; fetching historical price..." 
) endpoint = f"{settings.COINGECKO_URL}/coins/{self.ft.id}/history?date={format_date(self.donated_at)}&localization=false" + if settings.COINGECKO_API_KEY: + endpoint += f"&x_cg_pro_api_key={settings.COINGECKO_API_KEY}" logger.info(f"coingecko endpoint: {endpoint}") response = requests.get(endpoint) logger.info(f"coingecko response: {response}") diff --git a/indexer_app/tasks.py b/indexer_app/tasks.py index 99ea51a..1ae32c2 100644 --- a/indexer_app/tasks.py +++ b/indexer_app/tasks.py @@ -59,7 +59,7 @@ def listen_to_near_events(): try: # Update below with desired network & block height # start_block = get_block_height("current_block_height") - start_block = 112952680 + start_block = 112959664 logger.info(f"what's the start block, pray tell? {start_block-1}") loop.run_until_complete(indexer("mainnet", start_block - 1, None)) except WorkerLostError: From c7274aa2063285afaf2ea68822fd0927ca7f504a Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Wed, 29 May 2024 16:20:50 -0400 Subject: [PATCH 118/127] choose LakeConfig network based on PL_ENVIRONMENT --- indexer_app/tasks.py | 20 ++++++++++++++------ 1 file changed, 14 insertions(+), 6 deletions(-) diff --git a/indexer_app/tasks.py b/indexer_app/tasks.py index 1ae32c2..4d3f179 100644 --- a/indexer_app/tasks.py +++ b/indexer_app/tasks.py @@ -19,15 +19,23 @@ from .utils import cache_block_height, get_block_height -async def indexer(network: str, from_block: int, to_block: int): +async def indexer(from_block: int, to_block: int): """ Runs the lake indexer framework """ # Initialize lake indexer logger.info(f"from block: {from_block}") - lake_config = LakeConfig.mainnet() if network == "mainnet" else LakeConfig.testnet() + lake_config = ( + LakeConfig.testnet() + if settings.ENVIRONMENT == "testnet" + else LakeConfig.mainnet() + ) lake_config.start_block_height = ( - from_block if from_block else logger.info("Starting to index from latest block") + from_block + if from_block + else logger.info( + "Starting to index from latest block" + ) # TODO: wtf is this shitty code ) lake_config.aws_access_key_id = settings.AWS_ACCESS_KEY_ID lake_config.aws_secret_key = settings.AWS_SECRET_ACCESS_KEY @@ -58,10 +66,10 @@ def listen_to_near_events(): try: # Update below with desired network & block height - # start_block = get_block_height("current_block_height") - start_block = 112959664 + start_block = get_block_height("current_block_height") + # start_block = 112959664 logger.info(f"what's the start block, pray tell? 
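Pulled out as a sketch for clarity, patch 117's endpoint construction combined with the response parsing the earlier commented-out code in utils.py used; the helper name and timeout are assumptions, everything else follows the diff:

    import requests
    from django.conf import settings

    def fetch_historical_price_usd(coin_id: str, date_str: str):
        url = (
            f"{settings.COINGECKO_URL}/coins/{coin_id}/history"
            f"?date={date_str}&localization=false"
        )
        if settings.COINGECKO_API_KEY:
            url += f"&x_cg_pro_api_key={settings.COINGECKO_API_KEY}"
        resp = requests.get(url, timeout=10)
        resp.raise_for_status()
        return resp.json().get("market_data", {}).get("current_price", {}).get("usd")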
{start_block-1}") - loop.run_until_complete(indexer("mainnet", start_block - 1, None)) + loop.run_until_complete(indexer(start_block - 1, None)) except WorkerLostError: pass # don't log to Sentry finally: From a8fd173bf89a36fd7bcb2a957c863ca6faf82b5e Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Fri, 31 May 2024 17:31:35 -0400 Subject: [PATCH 119/127] Testnet -> Dev (#26) * add deploy-testnet.yml * add testnet deployment workflow & appspec * testnet workflow debugging * upload appspec as artifact * upload zipped codebase to s3 in workflow * continue working on after_install_testnet * separate watchtower log groups by django env * add testnet potlock TLA to indexer handler * add POTLOCK_TLA to settings * add dev.potlock.io and test-dev.potlock.io to allowed hosts * remove admin dashboard edit access * remove admin edit permission for all models * add timestamp to deployment zip filename * add env vars to deploy-testnet * add after_install_dev script --- .github/workflows/deploy-dev.yml | 51 ++++++++++ .github/workflows/deploy-testnet.yml | 66 +++++++++++++ .github/workflows/deploy.yml | 106 -------------------- accounts/admin.py | 47 +++++++-- activities/admin.py | 44 ++++++--- appspec-dev.yml | 16 +++ appspec-testnet.yml | 16 +++ base/settings.py | 14 ++- donations/admin.py | 9 ++ indexer_app/admin.py | 15 ++- indexer_app/handler.py | 12 ++- indexer_app/utils.py | 3 +- lists/admin.py | 70 +++++++++++--- pots/admin.py | 63 ++++++++++++ pots/utils.py | 4 +- scripts/after_install.sh | 64 +----------- scripts/after_install_dev.sh | 140 +++++++++++++++++++++++++++ scripts/after_install_testnet.sh | 67 +++++++++++++ tokens/admin.py | 34 +++++-- 19 files changed, 622 insertions(+), 219 deletions(-) create mode 100644 .github/workflows/deploy-dev.yml create mode 100644 .github/workflows/deploy-testnet.yml delete mode 100644 .github/workflows/deploy.yml create mode 100644 appspec-dev.yml create mode 100644 appspec-testnet.yml mode change 100755 => 100644 scripts/after_install.sh create mode 100755 scripts/after_install_dev.sh create mode 100755 scripts/after_install_testnet.sh diff --git a/.github/workflows/deploy-dev.yml b/.github/workflows/deploy-dev.yml new file mode 100644 index 0000000..334d04e --- /dev/null +++ b/.github/workflows/deploy-dev.yml @@ -0,0 +1,51 @@ +name: Dev deploy to EC2 on Push + +on: + push: + branches: [dev] + +env: + AWS_REGION: "us-east-1" + +# Permission can be added at job level or workflow level +permissions: + id-token: write # This is required for requesting the JWT + contents: read # This is required for actions/checkout +jobs: + DeployToCodeDeploy: + runs-on: ubuntu-latest + steps: + - name: Git clone the repository + uses: actions/checkout@v3 + + - name: Configure AWS credentials + uses: aws-actions/configure-aws-credentials@v1.7.0 + with: + role-to-assume: arn:aws:iam::471112976510:role/GitHubAction-AssumeRoleWithAction + role-session-name: GitHub_to_AWS_via_FederatedOIDC + aws-region: ${{ env.AWS_REGION }} + + - name: Generate appspec.yml for dev + run: cp appspec-dev.yml appspec.yml + + - name: Set environment variables + id: vars + run: | + echo "DATETIME=$(date +'%Y-%m-%d_%H-%M-%S')" >> $GITHUB_ENV + echo "FILENAME=django-indexer-dev-${DATETIME}.zip" >> $GITHUB_ENV + echo "S3_BUCKET=django-indexer-dev" >> $GITHUB_ENV + + - name: Create zip of repository + run: zip -r "${{ env.FILENAME }}" . 
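The TODO added in patch 118 flags that "from_block if from_block else logger.info(...)" only works because logger.info() returns None, so the expression assigns None as a side effect. Written out with the same behaviour (None meaning "start from the latest block", as the original log message says):

    if from_block:
        lake_config.start_block_height = from_block
    else:
        logger.info("Starting to index from latest block")
        lake_config.start_block_height = None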
+ + - name: Upload repository to S3 + run: aws s3 cp "${{ env.FILENAME }}" "s3://${{ env.S3_BUCKET }}/" + + - name: Create CodeDeploy Deployment + id: deploy + run: | + aws deploy create-deployment \ + --application-name django-indexer \ + --deployment-group-name django-indexer-dev-group \ + --deployment-config-name CodeDeployDefault.AllAtOnce \ + --s3-location bucket=${{ env.S3_BUCKET }},bundleType=zip,key=${{ env.FILENAME }} diff --git a/.github/workflows/deploy-testnet.yml b/.github/workflows/deploy-testnet.yml new file mode 100644 index 0000000..d22df3c --- /dev/null +++ b/.github/workflows/deploy-testnet.yml @@ -0,0 +1,66 @@ +name: Testnet deploy to EC2 on Push + +on: + push: + branches: [testnet] + +env: + AWS_REGION: "us-east-1" + +# Permission can be added at job level or workflow level +permissions: + id-token: write # This is required for requesting the JWT + contents: read # This is required for actions/checkout +jobs: + DeployToCodeDeploy: + runs-on: ubuntu-latest + steps: + - name: Git clone the repository + uses: actions/checkout@v3 + + - name: Configure AWS credentials + uses: aws-actions/configure-aws-credentials@v1.7.0 + with: + role-to-assume: arn:aws:iam::471112976510:role/GitHubAction-AssumeRoleWithAction + role-session-name: GitHub_to_AWS_via_FederatedOIDC + aws-region: ${{ env.AWS_REGION }} + + - name: Generate appspec.yml for testnet + run: cp appspec-testnet.yml appspec.yml + + - name: Set environment variables + id: vars + run: | + echo "DATETIME=$(date +'%Y-%m-%d_%H-%M-%S')" >> $GITHUB_ENV + echo "FILENAME=django-indexer-testnet-${DATETIME}.zip" >> $GITHUB_ENV + echo "S3_BUCKET=django-indexer-testnet" >> $GITHUB_ENV + + - name: Create zip of repository + run: zip -r "${{ env.FILENAME }}" . + + - name: Upload repository to S3 + run: aws s3 cp "${{ env.FILENAME }}" "s3://${{ env.S3_BUCKET }}/" + + - name: Create CodeDeploy Deployment + id: deploy + run: | + aws deploy create-deployment \ + --application-name django-indexer-testnet \ + --deployment-group-name django-indexer-testnet-group \ + --deployment-config-name CodeDeployDefault.AllAtOnce \ + --s3-location bucket=${{ env.S3_BUCKET }},bundleType=zip,key=${{ env.FILENAME }} + + # - name: Create zip of repository + # run: zip -r django-indexer-testnet.zip . 
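One caveat in the "Set environment variables" steps of these workflows: values appended to $GITHUB_ENV only become visible to subsequent steps, so the ${DATETIME} expansion inside the same run block happens while DATETIME is still unset and FILENAME comes out without a timestamp. A sketch of the fix, assigning a shell variable first:

    DATETIME=$(date +'%Y-%m-%d_%H-%M-%S')
    echo "DATETIME=${DATETIME}" >> $GITHUB_ENV
    echo "FILENAME=django-indexer-dev-${DATETIME}.zip" >> $GITHUB_ENV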
+ + # - name: Upload repository to S3 + # run: aws s3 cp django-indexer-testnet.zip s3://django-indexer-testnet/ + + # - name: Create CodeDeploy Deployment + # id: deploy + # run: | + # aws deploy create-deployment \ + # --application-name django-indexer-testnet \ + # --deployment-group-name django-indexer-testnet-group \ + # --deployment-config-name CodeDeployDefault.AllAtOnce \ + # --s3-location bucket=django-indexer-testnet,bundleType=zip,key=django-indexer-testnet.zip diff --git a/.github/workflows/deploy.yml b/.github/workflows/deploy.yml deleted file mode 100644 index 554fb7f..0000000 --- a/.github/workflows/deploy.yml +++ /dev/null @@ -1,106 +0,0 @@ -name: Deploy to EC2 on Push - -on: - push: - branches: [dev] - -env: - AWS_REGION: "us-east-1" - -# Permission can be added at job level or workflow level -permissions: - id-token: write # This is required for requesting the JWT - contents: read # This is required for actions/checkout -jobs: - AssumeRoleAndCallIdentity: - runs-on: ubuntu-latest - steps: - - name: Git clone the repository - uses: actions/checkout@v3 - - name: configure aws credentials - uses: aws-actions/configure-aws-credentials@v1.7.0 - with: - role-to-assume: arn:aws:iam::471112976510:role/GitHubAction-AssumeRoleWithAction - role-session-name: GitHub_to_AWS_via_FederatedOIDC - aws-region: ${{ env.AWS_REGION }} - # Hello from AWS: WhoAmI - - name: Sts GetCallerIdentity - run: | - aws sts get-caller-identity - - # Step 3 - check the application-name and deployment group name - - name: Create CodeDeploy Deployment - id: deploy - run: | - aws deploy create-deployment \ - --application-name django-indexer \ - --deployment-group-name django-indexer-dev \ - --deployment-config-name CodeDeployDefault.AllAtOnce \ - --github-location repository=${{ github.repository }},commitId=${{ github.sha }} - -# name: Deploy to EC2 on Push - -# on: -# push: -# branches: -# - main -# - dev - -# jobs: -# deploy: -# runs-on: ubuntu-latest -# environment: -# name: ${{ github.ref == 'refs/heads/main' && 'prod' || 'dev' }} -# steps: -# - name: Checkout code -# uses: actions/checkout@v2 - -# - name: Set up SSH key -# uses: webfactory/ssh-agent@v0.5.3 -# with: -# ssh-private-key: ${{ secrets.EC2_SSH_PRIVATE_KEY }} - -# - name: Push new code to EC2 -# run: | -# rsync -avz --exclude '.git*' --exclude 'node_modules' ./ ${{ vars.EC2_USER }}@${{ vars.EC2_SSH_HOST }}:/home/ec2-user/django-indexer -# echo "Code has been pushed to the EC2 instance." - -# - name: Check for pending migrations -# id: check_migrations -# run: | -# echo "Checking for pending migrations..." -# pending_migrations=$(ssh -o "StrictHostKeyChecking=no" ${{ vars.EC2_USER }}@${{ vars.EC2_SSH_HOST }} "cd /home/ec2-user/django-indexer && source env/bin/activate && python manage.py showmigrations --plan | grep '\[ \]'") -# echo "::set-output name=pending::${pending_migrations}" -# if [ -z "$pending_migrations" ]; then -# echo "No migrations found." -# else: -# echo "Migrations found, stopping services." - -# - name: Stop services if migrations are pending -# if: steps.check_migrations.outputs.pending -# run: | -# echo "Stopping services..." -# ssh -o "StrictHostKeyChecking=no" ${{ vars.EC2_USER }}@${{ vars.EC2_SSH_HOST }} "sudo systemctl stop gunicorn.service indexer.service" - -# - name: Run migrations -# if: steps.check_migrations.outputs.pending -# run: | -# echo "Running migrations..." 
-# ssh -o "StrictHostKeyChecking=no" ${{ vars.EC2_USER }}@${{ vars.EC2_SSH_HOST }} "cd /home/ec2-user/django-indexer && source env/bin/activate && python manage.py migrate" - -# - name: Run collectstatic -# run: | -# echo "Running collectstatic..." -# ssh -o "StrictHostKeyChecking=no" ${{ vars.EC2_USER }}@${{ vars.EC2_SSH_HOST }} "cd /home/ec2-user/django-indexer && source env/bin/activate && python manage.py collectstatic --noinput" - -# - name: Restart services if migrations were run -# if: steps.check_migrations.outputs.pending -# run: | -# echo "Restarting services after migrations..." -# ssh -o "StrictHostKeyChecking=no" ${{ vars.EC2_USER }}@${{ vars.EC2_SSH_HOST }} "sudo systemctl restart gunicorn.service indexer.service" - -# - name: Restart services if no migrations -# if: steps.check_migrations.outputs.pending == '' -# run: | -# echo "Restarting services without migration..." -# ssh -o "StrictHostKeyChecking=no" ${{ vars.EC2_USER }}@${{ vars.EC2_SSH_HOST }} "sudo systemctl restart gunicorn.service" diff --git a/accounts/admin.py b/accounts/admin.py index fb44fbe..3b8aca7 100644 --- a/accounts/admin.py +++ b/accounts/admin.py @@ -1,25 +1,52 @@ from django.contrib import admin + from .models import Account + @admin.register(Account) class AccountAdmin(admin.ModelAdmin): - list_display = ('id', 'total_donations_in_usd', 'total_donations_out_usd', 'total_matching_pool_allocations_usd', 'donors_count') - search_fields = ('id',) # Allow searching by account address - list_filter = ('total_donations_in_usd', 'total_donations_out_usd') # Filter by donation amounts - ordering = ('-total_donations_in_usd',) # Default ordering + list_display = ( + "id", + "total_donations_in_usd", + "total_donations_out_usd", + "total_matching_pool_allocations_usd", + "donors_count", + ) + search_fields = ("id",) # Allow searching by account address + list_filter = ( + "total_donations_in_usd", + "total_donations_out_usd", + ) # Filter by donation amounts + ordering = ("-total_donations_in_usd",) # Default ordering # Optionally, format decimal fields for better readability in the admin def total_donations_in_usd_display(self, obj): return "${:,.2f}".format(obj.total_donations_in_usd) - total_donations_in_usd_display.admin_order_field = 'total_donations_in_usd' - total_donations_in_usd_display.short_description = 'Total Donations Received (USD)' + + total_donations_in_usd_display.admin_order_field = "total_donations_in_usd" + total_donations_in_usd_display.short_description = "Total Donations Received (USD)" def total_donations_out_usd_display(self, obj): return "${:,.2f}".format(obj.total_donations_out_usd) - total_donations_out_usd_display.admin_order_field = 'total_donations_out_usd' - total_donations_out_usd_display.short_description = 'Total Donations Sent (USD)' + + total_donations_out_usd_display.admin_order_field = "total_donations_out_usd" + total_donations_out_usd_display.short_description = "Total Donations Sent (USD)" def total_matching_pool_allocations_usd_display(self, obj): return "${:,.2f}".format(obj.total_matching_pool_allocations_usd) - total_matching_pool_allocations_usd_display.admin_order_field = 'total_matching_pool_allocations_usd' - total_matching_pool_allocations_usd_display.short_description = 'Total Matching Pool Allocations (USD)' + + total_matching_pool_allocations_usd_display.admin_order_field = ( + "total_matching_pool_allocations_usd" + ) + total_matching_pool_allocations_usd_display.short_description = ( + "Total Matching Pool Allocations (USD)" + ) + + def 
has_add_permission(self, request): + return False + + def has_change_permission(self, request, obj=None): + return False + + def has_delete_permission(self, request, obj=None): + return False diff --git a/activities/admin.py b/activities/admin.py index cab9f72..219e369 100644 --- a/activities/admin.py +++ b/activities/admin.py @@ -1,30 +1,52 @@ from django.contrib import admin from django.utils.html import format_html -from .models import Activity, Account + +from .models import Account, Activity + @admin.register(Activity) class ActivityAdmin(admin.ModelAdmin): - list_display = ('id', 'signer_address', 'receiver_address', 'timestamp', 'type', 'transaction_link', 'action_result') - list_filter = ('timestamp', 'type', 'signer', 'receiver') - search_fields = ('signer__id', 'receiver__id', 'tx_hash') - date_hierarchy = 'timestamp' - ordering = ('-timestamp',) + list_display = ( + "id", + "signer_address", + "receiver_address", + "timestamp", + "type", + "transaction_link", + "action_result", + ) + list_filter = ("timestamp", "type", "signer", "receiver") + search_fields = ("signer__id", "receiver__id", "tx_hash") + date_hierarchy = "timestamp" + ordering = ("-timestamp",) def signer_address(self, obj): return obj.signer.id - signer_address.admin_order_field = 'signer' - signer_address.short_description = 'Signer Address' + + signer_address.admin_order_field = "signer" + signer_address.short_description = "Signer Address" def receiver_address(self, obj): return obj.receiver.id - receiver_address.admin_order_field = 'receiver' - receiver_address.short_description = 'Receiver Address' + + receiver_address.admin_order_field = "receiver" + receiver_address.short_description = "Receiver Address" def transaction_link(self, obj): url = f"https://nearblocks.io?query={obj.tx_hash}" return format_html('{}', url, obj.tx_hash) - transaction_link.short_description = 'Transaction Hash' # Sets the column header + + transaction_link.short_description = "Transaction Hash" # Sets the column header # def action_result_summary(self, obj): # return "Has Result" if obj.action_result else "No Result" # action_result_summary.short_description = 'Action Result' + + def has_add_permission(self, request): + return False + + def has_change_permission(self, request, obj=None): + return False + + def has_delete_permission(self, request, obj=None): + return False diff --git a/appspec-dev.yml b/appspec-dev.yml new file mode 100644 index 0000000..192e4cf --- /dev/null +++ b/appspec-dev.yml @@ -0,0 +1,16 @@ +version: 0.0 +os: linux +files: + - source: / + destination: /home/ec2-user/django-indexer +hooks: + # # Install: + AfterInstall: + - location: scripts/after_install_dev.sh + timeout: 300 + runas: ec2-user +# ApplicationStart: +# - location: scripts/application_start.sh +# timeout: 300 +# runas: root +# # ValidateService: diff --git a/appspec-testnet.yml b/appspec-testnet.yml new file mode 100644 index 0000000..b006f0e --- /dev/null +++ b/appspec-testnet.yml @@ -0,0 +1,16 @@ +version: 0.0 +os: linux +files: + - source: / + destination: /home/ec2-user/django-indexer-testnet +hooks: + # # Install: + AfterInstall: + - location: scripts/after_install_testnet.sh + timeout: 300 + runas: ec2-user +# ApplicationStart: +# - location: scripts/application_start.sh +# timeout: 300 +# runas: root +# # ValidateService: diff --git a/base/settings.py b/base/settings.py index ce99c04..7525f40 100644 --- a/base/settings.py +++ b/base/settings.py @@ -30,7 +30,12 @@ # TODO: update before prod release SECRET_KEY = 
"django-insecure-=r_v_es6w6rxv42^#kc2hca6p%=fe_*cog_5!t%19zea!enlju" -ALLOWED_HOSTS = ["ec2-100-27-57-47.compute-1.amazonaws.com", "127.0.0.1"] +ALLOWED_HOSTS = [ + "ec2-100-27-57-47.compute-1.amazonaws.com", + "127.0.0.1", + "dev.potlock.io", + "test-dev.potlock.io", +] # Env vars AWS_ACCESS_KEY_ID = os.environ.get("PL_AWS_ACCESS_KEY_ID") @@ -52,6 +57,8 @@ REDIS_PORT = os.environ.get("PL_REDIS_PORT", 6379) SENTRY_DSN = os.environ.get("PL_SENTRY_DSN") +POTLOCK_TLA = "potlock.testnet" if ENVIRONMENT == "testnet" else "potlock.near" + BLOCK_SAVE_HEIGHT = os.environ.get("BLOCK_SAVE_HEIGHT") COINGECKO_URL = ( @@ -213,6 +220,9 @@ log_level = getattr(logging, LOG_LEVEL, logging.INFO) # print("LOG_LEVEL: ", LOG_LEVEL) +# Set log group name based on environment +log_group_name = f"django-indexer-{ENVIRONMENT}" + # Setting up the logging configuration LOGGING = { "version": 1, @@ -254,7 +264,7 @@ LOGGING["handlers"]["watchtower"] = { "class": "watchtower.CloudWatchLogHandler", "boto3_client": boto3_logs_client, - "log_group_name": "django-indexer", + "log_group_name": log_group_name, "formatter": "standard", "level": log_level, } diff --git a/donations/admin.py b/donations/admin.py index 43f87d3..f27994d 100644 --- a/donations/admin.py +++ b/donations/admin.py @@ -59,3 +59,12 @@ def formfield_for_dbfield(self, db_field, request, **kwargs): field.widget.format = "%d-%m-%Y %H:%M" field.widget.attrs.update({"class": "vDateField", "size": "20"}) return field + + def has_add_permission(self, request): + return False + + def has_change_permission(self, request, obj=None): + return False + + def has_delete_permission(self, request, obj=None): + return False diff --git a/indexer_app/admin.py b/indexer_app/admin.py index 3b56656..e94e67a 100644 --- a/indexer_app/admin.py +++ b/indexer_app/admin.py @@ -1,7 +1,18 @@ from django.contrib import admin + from .models import BlockHeight + @admin.register(BlockHeight) class BlockHeightAdmin(admin.ModelAdmin): - list_display = ('id', 'block_height', 'updated_at') - ordering = ('-updated_at',) + list_display = ("id", "block_height", "updated_at") + ordering = ("-updated_at",) + + def has_add_permission(self, request): + return False + + def has_change_permission(self, request, obj=None): + return False + + def has_delete_permission(self, request, obj=None): + return False diff --git a/indexer_app/handler.py b/indexer_app/handler.py index 9d86fbf..0cdfa99 100644 --- a/indexer_app/handler.py +++ b/indexer_app/handler.py @@ -2,6 +2,7 @@ import json from datetime import datetime +from django.conf import settings from django.core.cache import cache from near_lake_framework import near_primitives @@ -49,8 +50,9 @@ async def handle_streamer_message(streamer_message: near_primitives.StreamerMess for shard in streamer_message.shards: for receipt_execution_outcome in shard.receipt_execution_outcomes: # we only want to proceed if it's a potlock tx and it succeeded.... (unreadable if statement?) + lists_contract = "lists." 
+ settings.POTLOCK_TLA if not receipt_execution_outcome.receipt.receiver_id.endswith( - "potlock.near" + settings.POTLOCK_TLA ) or ( "SuccessReceiptId" not in receipt_execution_outcome.execution_outcome.outcome.status @@ -232,7 +234,7 @@ async def handle_streamer_message(streamer_message: near_primitives.StreamerMess logger.info( f"registrations incoming: {args_dict}, {action}" ) - if receiver_id != "lists.potlock.near": + if receiver_id != lists_contract: break await handle_new_list_registration( args_dict, receiver_id, signer_id, receipt, status_obj @@ -306,7 +308,7 @@ async def handle_streamer_message(streamer_message: near_primitives.StreamerMess "owner_remove_admins" ): # TODO: use update_admins event instead of method call to handle all cases logger.info(f"attempting to remove admins....: {args_dict}") - if receiver_id != "lists.potlock.near": + if receiver_id != lists_contract: break await handle_list_admin_removal( args_dict, receiver_id, signer_id, receipt.receipt_id @@ -315,14 +317,14 @@ async def handle_streamer_message(streamer_message: near_primitives.StreamerMess case "create_list": logger.info(f"creating list... {args_dict}, {action}") - if receiver_id != "lists.potlock.near": + if receiver_id != lists_contract: break await handle_new_list(signer_id, receiver_id, status_obj) break case "upvote": logger.info(f"up voting... {args_dict}") - if receiver_id != "lists.potlock.near": + if receiver_id != lists_contract: break await handle_list_upvote( args_dict, receiver_id, signer_id, receipt.receipt_id diff --git a/indexer_app/utils.py b/indexer_app/utils.py index 7e44c2f..5f394a3 100644 --- a/indexer_app/utils.py +++ b/indexer_app/utils.py @@ -633,10 +633,11 @@ async def handle_new_donations( log_data: list, ): logger.info(f"new donation data: {data}, {receiverId}") + donate_contract_addr = "donate." + settings.POTLOCK_TLA if ( actionName == "direct" - ) and receiverId == "donate.potlock.near": # early pot donations followed similarly to direct donations i.e they returned result instead of events. + ) and receiverId == donate_contract_addr: # early pot donations followed similarly to direct donations i.e they returned result instead of events. 
logger.info("calling donate contract...") # Handle direct donation diff --git a/lists/admin.py b/lists/admin.py index ae08c51..db244ed 100644 --- a/lists/admin.py +++ b/lists/admin.py @@ -1,22 +1,68 @@ from django.contrib import admin -from .models import List, ListUpvote, ListRegistration, Account + +from .models import Account, List, ListRegistration, ListUpvote + @admin.register(List) class ListAdmin(admin.ModelAdmin): - list_display = ('id', 'name', 'owner', 'default_registration_status', 'created_at', 'updated_at') - list_filter = ('created_at', 'updated_at', 'default_registration_status') - search_fields = ('name', 'owner__id') - ordering = ('-created_at',) + list_display = ( + "id", + "name", + "owner", + "default_registration_status", + "created_at", + "updated_at", + ) + list_filter = ("created_at", "updated_at", "default_registration_status") + search_fields = ("name", "owner__id") + ordering = ("-created_at",) + + def has_add_permission(self, request): + return False + + def has_change_permission(self, request, obj=None): + return False + + def has_delete_permission(self, request, obj=None): + return False + @admin.register(ListUpvote) class ListUpvoteAdmin(admin.ModelAdmin): - list_display = ('id', 'list', 'account', 'created_at') - list_filter = ('created_at',) - search_fields = ('list__name', 'account__id') + list_display = ("id", "list", "account", "created_at") + list_filter = ("created_at",) + search_fields = ("list__name", "account__id") + + def has_add_permission(self, request): + return False + + def has_change_permission(self, request, obj=None): + return False + + def has_delete_permission(self, request, obj=None): + return False + @admin.register(ListRegistration) class ListRegistrationAdmin(admin.ModelAdmin): - list_display = ('id', 'list', 'registrant', 'registered_by', 'status', 'submitted_at', 'updated_at') - list_filter = ('status', 'submitted_at', 'updated_at') - search_fields = ('list__name', 'registrant__id', 'registered_by__id') - ordering = ('-submitted_at',) + list_display = ( + "id", + "list", + "registrant", + "registered_by", + "status", + "submitted_at", + "updated_at", + ) + list_filter = ("status", "submitted_at", "updated_at") + search_fields = ("list__name", "registrant__id", "registered_by__id") + ordering = ("-submitted_at",) + + def has_add_permission(self, request): + return False + + def has_change_permission(self, request, obj=None): + return False + + def has_delete_permission(self, request, obj=None): + return False diff --git a/pots/admin.py b/pots/admin.py index 09e40b3..aca414b 100644 --- a/pots/admin.py +++ b/pots/admin.py @@ -46,6 +46,15 @@ def get_form(self, request, obj=None, **kwargs): ) return form + def has_add_permission(self, request): + return False + + def has_change_permission(self, request, obj=None): + return False + + def has_delete_permission(self, request, obj=None): + return False + class PotForm(forms.ModelForm): class Meta: @@ -73,6 +82,15 @@ def get_form(self, request, obj=None, **kwargs): form.base_fields["admins"].queryset = obj.admins.all() return form + def has_add_permission(self, request): + return False + + def has_change_permission(self, request, obj=None): + return False + + def has_delete_permission(self, request, obj=None): + return False + @admin.register(PotApplication) class PotApplicationAdmin(admin.ModelAdmin): @@ -80,6 +98,15 @@ class PotApplicationAdmin(admin.ModelAdmin): search_fields = ("pot__id", "applicant__id") list_filter = ("status", "submitted_at") + def has_add_permission(self, request): + 
return False + + def has_change_permission(self, request, obj=None): + return False + + def has_delete_permission(self, request, obj=None): + return False + @admin.register(PotApplicationReview) class PotApplicationReviewAdmin(admin.ModelAdmin): @@ -95,6 +122,15 @@ class PotApplicationReviewAdmin(admin.ModelAdmin): search_fields = ("application__id", "reviewer__id") list_filter = ("status", "reviewed_at") + def has_add_permission(self, request): + return False + + def has_change_permission(self, request, obj=None): + return False + + def has_delete_permission(self, request, obj=None): + return False + @admin.register(PotPayout) class PotPayoutAdmin(admin.ModelAdmin): @@ -102,6 +138,15 @@ class PotPayoutAdmin(admin.ModelAdmin): search_fields = ("pot__id", "recipient__id") list_filter = ("paid_at",) + def has_add_permission(self, request): + return False + + def has_change_permission(self, request, obj=None): + return False + + def has_delete_permission(self, request, obj=None): + return False + @admin.register(PotPayoutChallenge) class PotPayoutChallengeAdmin(admin.ModelAdmin): @@ -109,9 +154,27 @@ class PotPayoutChallengeAdmin(admin.ModelAdmin): search_fields = ("challenger__id", "pot__id") list_filter = ("created_at",) + def has_add_permission(self, request): + return False + + def has_change_permission(self, request, obj=None): + return False + + def has_delete_permission(self, request, obj=None): + return False + @admin.register(PotPayoutChallengeAdminResponse) class PotPayoutChallengeAdminResponseAdmin(admin.ModelAdmin): list_display = ("id", "pot", "admin", "created_at", "resolved") search_fields = ("admin__id", "challenge__id") list_filter = ("created_at", "resolved") + + def has_add_permission(self, request): + return False + + def has_change_permission(self, request, obj=None): + return False + + def has_delete_permission(self, request, obj=None): + return False diff --git a/pots/utils.py b/pots/utils.py index 01bfab6..1a5f991 100644 --- a/pots/utils.py +++ b/pots/utils.py @@ -4,12 +4,12 @@ def match_pot_factory_version_pattern(receiver): - """Matches the base pot factory version pattern without a subaccount.""" + """Matches the base pot factory version pattern without a subaccount. NB: does not currently handle testnet factory.""" pattern = f"^{BASE_PATTERN}" return bool(re.match(pattern, receiver)) def match_pot_subaccount_version_pattern(receiver): - """Matches the pot factory version pattern with a subaccount.""" + """Matches the pot factory version pattern with a subaccount. 
NB: does not currently handle testnet factory.""" pattern = f"^[a-zA-Z0-9_]+\.{BASE_PATTERN}" return bool(re.match(pattern, receiver)) diff --git a/scripts/after_install.sh b/scripts/after_install.sh old mode 100755 new mode 100644 index 6c56c5b..152ad3a --- a/scripts/after_install.sh +++ b/scripts/after_install.sh @@ -1,65 +1,5 @@ -# #!/bin/bash -# # Log output to a specific file -# LOG_FILE="/home/ec2-user/django-indexer/logs/deploy.log" - -# echo -e "\n\n" >> "$LOG_FILE" -# echo "=========================================" >> "$LOG_FILE" -# echo "Running after_install.sh at $(date '+%Y-%m-%d %H:%M:%S')" >> "$LOG_FILE" -# echo "=========================================" >> "$LOG_FILE" - -# # Load env vars -# source /home/ec2-user/.bashrc - -# # Set correct ownership recursively for project directory -# sudo chown -R ec2-user:nginx /home/ec2-user/django-indexer/ -# echo "$(date '+%Y-%m-%d %H:%M:%S') - Corrected ownership to ec2-user:nginx" >> "$LOG_FILE" - -# # Set the necessary permissions -# sudo chmod -R 775 /home/ec2-user/django-indexer/ -# echo "$(date '+%Y-%m-%d %H:%M:%S') - Set permissions to 775" >> "$LOG_FILE" - -# # Restart nginx to apply any configuration changes -# sudo systemctl restart nginx -# echo "$(date '+%Y-%m-%d %H:%M:%S') - Restarted nginx" >> "$LOG_FILE" - -# # Define the project directory -# PROJECT_DIR="/home/ec2-user/django-indexer" - -# # Navigate to the project directory -# cd "$PROJECT_DIR" - -# # Install dependencies using Poetry -# echo "$(date '+%Y-%m-%d %H:%M:%S') - Installing dependencies with Poetry" >> "$LOG_FILE" -# poetry install >> "$LOG_FILE" -# echo "$(date '+%Y-%m-%d %H:%M:%S') - Dependencies installed" >> "$LOG_FILE" - -# # Log the full output of showmigrations to diagnose -# echo "Checking for pending migrations..." >> "$LOG_FILE" -# poetry run python manage.py showmigrations >> "$LOG_FILE" 2>&1 - -# # Check for unapplied migrations -# PENDING_MIGRATIONS=$(poetry run python manage.py showmigrations | grep "\[ \]" | wc -l) # Count unapplied migrations - -# if [ "$PENDING_MIGRATIONS" -gt 0 ]; then -# echo "Migrations found; stopping services..." >> "$LOG_FILE" -# sudo systemctl stop gunicorn celery-indexer-worker celery-beat-worker celery-beat - -# echo 'Applying migrations...' >> "$LOG_FILE" -# poetry run python manage.py migrate >> "$LOG_FILE" 2>&1 - -# echo 'Starting services...' >> "$LOG_FILE" -# sudo systemctl start gunicorn celery-indexer-worker celery-beat-worker celery-beat -# else -# echo 'No migrations found. Running collectstatic and restarting services...' 
>> "$LOG_FILE" -# poetry run python manage.py collectstatic --noinput >> "$LOG_FILE" 2>&1 -# sudo systemctl restart gunicorn celery-indexer-worker celery-beat-worker celery-beat -# fi - -# echo "$(date '+%Y-%m-%d %H:%M:%S') - after_install.sh completed" >> "$LOG_FILE" - - - #!/bin/bash +# TODO: deprecate this (move to _dev & _testnet files) # Log output to a specific file LOG_FILE="/home/ec2-user/django-indexer/logs/deploy.log" @@ -124,4 +64,4 @@ else sudo systemctl restart gunicorn celery-indexer-worker celery-beat-worker celery-beat fi -echo "$(date '+%Y-%m-%d %H:%M:%S') - after_install.sh completed" >> "$LOG_FILE" +echo "$(date '+%Y-%m-%d %H:%M:%S') - after_install.sh completed" >> "$LOG_FILE" \ No newline at end of file diff --git a/scripts/after_install_dev.sh b/scripts/after_install_dev.sh new file mode 100755 index 0000000..0cc5a0a --- /dev/null +++ b/scripts/after_install_dev.sh @@ -0,0 +1,140 @@ +#!/bin/bash +# Log output to a specific file +LOG_FILE="/home/ec2-user/django-indexer-dev/logs/deploy.log" + +echo -e "\n\n" >> "$LOG_FILE" +echo "=========================================" >> "$LOG_FILE" +echo "Running after_install_dev.sh at $(date '+%Y-%m-%d %H:%M:%S')" >> "$LOG_FILE" +echo "=========================================" >> "$LOG_FILE" + +# Load env vars +export PL_ENVIRONMENT=dev +source /home/ec2-user/.bashrc + +# Set correct ownership recursively for project directory +sudo chown -R ec2-user:nginx /home/ec2-user/django-indexer-dev/ +echo "$(date '+%Y-%m-%d %H:%M:%S') - Corrected ownership to ec2-user:nginx" >> "$LOG_FILE" + +# Set the necessary permissions +sudo chmod -R 775 /home/ec2-user/django-indexer-dev/ +echo "$(date '+%Y-%m-%d %H:%M:%S') - Set permissions to 775" >> "$LOG_FILE" + +# Restart nginx to apply any configuration changes +sudo systemctl restart nginx +echo "$(date '+%Y-%m-%d %H:%M:%S') - Restarted nginx" >> "$LOG_FILE" + +# Define the project directory +PROJECT_DIR="/home/ec2-user/django-indexer-dev" + +# Navigate to the project directory +cd "$PROJECT_DIR" + +# Source the specific poetry virtual environment +source "/home/ec2-user/.cache/pypoetry/virtualenvs/django-indexer-Y-SQFfhb-py3.11/bin/activate" + +# Install dependencies using Poetry +echo "$(date '+%Y-%m-%d %H:%M:%S') - Installing dependencies with Poetry" >> "$LOG_FILE" +poetry install >> "$LOG_FILE" +echo "$(date '+%Y-%m-%d %H:%M:%S') - Dependencies installed" >> "$LOG_FILE" + +# Check if there are pending migrations and log the output +echo "Checking for pending migrations..." >> "$LOG_FILE" +PENDING_MIGRATIONS=$(poetry run python manage.py showmigrations | grep '\[ \]' 2>&1) # Redirect stderr to stdout +echo "Migration check output: $PENDING_MIGRATIONS" >> "$LOG_FILE" + +# Log the full output of showmigrations +echo "Checking for pending migrations..." >> "$LOG_FILE" +poetry run python manage.py showmigrations >> "$LOG_FILE" 2>&1 # Logging full output to diagnose + +# Check for unapplied migrations +PENDING_MIGRATIONS=$(poetry run python manage.py showmigrations | grep "\[ \]" | wc -l) # Count unapplied migrations + +if [ "$PENDING_MIGRATIONS" -gt 0 ]; then + echo "Migrations found; stopping services..." >> "$LOG_FILE" + sudo systemctl stop gunicorn-dev celery-indexer-worker-dev celery-beat-worker-dev celery-beat-dev + + echo 'Applying migrations...' >> "$LOG_FILE" + poetry run python manage.py migrate >> "$LOG_FILE" 2>&1 + + echo 'Starting services...' 
>> "$LOG_FILE" + sudo systemctl start gunicorn-dev celery-indexer-worker-dev celery-beat-worker-dev celery-beat-dev +else + echo 'No migrations found. Running collectstatic and restarting services...' >> "$LOG_FILE" + poetry run python manage.py collectstatic --noinput >> "$LOG_FILE" 2>&1 + sudo systemctl restart gunicorn-dev celery-indexer-worker-dev celery-beat-worker-dev celery-beat-dev +fi + +echo "$(date '+%Y-%m-%d %H:%M:%S') - after_install_dev.sh completed" >> "$LOG_FILE" + + + + +# #!/bin/bash +# # Log output to a specific file +# LOG_FILE="/home/ec2-user/django-indexer-dev/logs/deploy.log" + +# # print placeholder +# echo -e "\n THIS IS A PLACEHOLDER \n" >> "$LOG_FILE" + +# echo -e "\n\n" >> "$LOG_FILE" +# echo "=========================================" >> "$LOG_FILE" +# echo "Running after_install_dev.sh at $(date '+%Y-%m-%d %H:%M:%S')" >> "$LOG_FILE" +# echo "=========================================" >> "$LOG_FILE" + +# # Load env vars +# source /home/ec2-user/.bashrc + +# # Set correct ownership recursively for project directory +# sudo chown -R ec2-user:nginx /home/ec2-user/django-indexer/ +# echo "$(date '+%Y-%m-%d %H:%M:%S') - Corrected ownership to ec2-user:nginx" >> "$LOG_FILE" + +# # Set the necessary permissions +# sudo chmod -R 775 /home/ec2-user/django-indexer/ +# echo "$(date '+%Y-%m-%d %H:%M:%S') - Set permissions to 775" >> "$LOG_FILE" + +# # Restart nginx to apply any configuration changes +# sudo systemctl restart nginx +# echo "$(date '+%Y-%m-%d %H:%M:%S') - Restarted nginx" >> "$LOG_FILE" + +# # Define the project directory +# PROJECT_DIR="/home/ec2-user/django-indexer" + +# # Navigate to the project directory +# cd "$PROJECT_DIR" + +# # Source the specific poetry virtual environment +# source "/home/ec2-user/.cache/pypoetry/virtualenvs/django-indexer-Y-SQFfhb-py3.11/bin/activate" # TODO: UPDATE THIS + +# # Install dependencies using Poetry +# echo "$(date '+%Y-%m-%d %H:%M:%S') - Installing dependencies with Poetry" >> "$LOG_FILE" +# poetry install >> "$LOG_FILE" +# echo "$(date '+%Y-%m-%d %H:%M:%S') - Dependencies installed" >> "$LOG_FILE" + +# # Check if there are pending migrations and log the output +# echo "Checking for pending migrations..." >> "$LOG_FILE" +# PENDING_MIGRATIONS=$(poetry run python manage.py showmigrations | grep '\[ \]' 2>&1) # Redirect stderr to stdout +# echo "Migration check output: $PENDING_MIGRATIONS" >> "$LOG_FILE" + +# # Log the full output of showmigrations +# echo "Checking for pending migrations..." >> "$LOG_FILE" +# poetry run python manage.py showmigrations >> "$LOG_FILE" 2>&1 # Logging full output to diagnose + +# # Check for unapplied migrations +# PENDING_MIGRATIONS=$(poetry run python manage.py showmigrations | grep "\[ \]" | wc -l) # Count unapplied migrations + +# if [ "$PENDING_MIGRATIONS" -gt 0 ]; then +# echo "Migrations found; stopping services..." >> "$LOG_FILE" +# sudo systemctl stop gunicorn celery-indexer-worker celery-beat-worker celery-beat + +# echo 'Applying migrations...' >> "$LOG_FILE" +# poetry run python manage.py migrate >> "$LOG_FILE" 2>&1 + +# echo 'Starting services...' >> "$LOG_FILE" +# sudo systemctl start gunicorn celery-indexer-worker celery-beat-worker celery-beat +# else +# echo 'No migrations found. Running collectstatic and restarting services...' 
>> "$LOG_FILE" +# poetry run python manage.py collectstatic --noinput >> "$LOG_FILE" 2>&1 +# sudo systemctl restart gunicorn celery-indexer-worker celery-beat-worker celery-beat +# fi + +# echo "$(date '+%Y-%m-%d %H:%M:%S') - after_install_dev.sh completed" >> "$LOG_FILE" diff --git a/scripts/after_install_testnet.sh b/scripts/after_install_testnet.sh new file mode 100755 index 0000000..94c517b --- /dev/null +++ b/scripts/after_install_testnet.sh @@ -0,0 +1,67 @@ +#!/bin/bash +# Log output to a specific file +LOG_FILE="/home/ec2-user/django-indexer-testnet/logs/deploy.log" + +echo -e "\n\n" >> "$LOG_FILE" +echo "=========================================" >> "$LOG_FILE" +echo "Running after_install_testnet.sh at $(date '+%Y-%m-%d %H:%M:%S')" >> "$LOG_FILE" +echo "=========================================" >> "$LOG_FILE" + +# Load env vars +export PL_ENVIRONMENT=testnet +source /home/ec2-user/.bashrc + +# Set correct ownership recursively for project directory +sudo chown -R ec2-user:nginx /home/ec2-user/django-indexer-testnet/ +echo "$(date '+%Y-%m-%d %H:%M:%S') - Corrected ownership to ec2-user:nginx" >> "$LOG_FILE" + +# Set the necessary permissions +sudo chmod -R 775 /home/ec2-user/django-indexer-testnet/ +echo "$(date '+%Y-%m-%d %H:%M:%S') - Set permissions to 775" >> "$LOG_FILE" + +# Restart nginx to apply any configuration changes +sudo systemctl restart nginx +echo "$(date '+%Y-%m-%d %H:%M:%S') - Restarted nginx" >> "$LOG_FILE" + +# Define the project directory +PROJECT_DIR="/home/ec2-user/django-indexer-testnet" + +# Navigate to the project directory +cd "$PROJECT_DIR" + +# Source the specific poetry virtual environment +source "/home/ec2-user/.cache/pypoetry/virtualenvs/django-indexer-AhfQkQzj-py3.11/bin/activate" + +# Install dependencies using Poetry +echo "$(date '+%Y-%m-%d %H:%M:%S') - Installing dependencies with Poetry" >> "$LOG_FILE" +poetry install >> "$LOG_FILE" +echo "$(date '+%Y-%m-%d %H:%M:%S') - Dependencies installed" >> "$LOG_FILE" + +# Check if there are pending migrations and log the output +echo "Checking for pending migrations..." >> "$LOG_FILE" +PENDING_MIGRATIONS=$(poetry run python manage.py showmigrations | grep '\[ \]' 2>&1) # Redirect stderr to stdout +echo "Migration check output: $PENDING_MIGRATIONS" >> "$LOG_FILE" + +# Log the full output of showmigrations +echo "Checking for pending migrations..." >> "$LOG_FILE" +poetry run python manage.py showmigrations >> "$LOG_FILE" 2>&1 # Logging full output to diagnose + +# Check for unapplied migrations +PENDING_MIGRATIONS=$(poetry run python manage.py showmigrations | grep "\[ \]" | wc -l) # Count unapplied migrations + +if [ "$PENDING_MIGRATIONS" -gt 0 ]; then + echo "Migrations found; stopping services..." >> "$LOG_FILE" + sudo systemctl stop gunicorn-testnet celery-indexer-worker-testnet celery-beat-worker-testnet celery-beat-testnet + + echo 'Applying migrations...' >> "$LOG_FILE" + poetry run python manage.py migrate >> "$LOG_FILE" 2>&1 + + echo 'Starting services...' >> "$LOG_FILE" + sudo systemctl start gunicorn-testnet celery-indexer-worker-testnet celery-beat-worker-testnet celery-beat-testnet +else + echo 'No migrations found. Running collectstatic and restarting services...' 
>> "$LOG_FILE" + poetry run python manage.py collectstatic --noinput >> "$LOG_FILE" 2>&1 + sudo systemctl restart gunicorn-testnet celery-indexer-worker-testnet celery-beat-worker-testnet celery-beat-testnet +fi + +echo "$(date '+%Y-%m-%d %H:%M:%S') - after_install_testnet.sh completed" >> "$LOG_FILE" diff --git a/tokens/admin.py b/tokens/admin.py index 0ca45d6..f783399 100644 --- a/tokens/admin.py +++ b/tokens/admin.py @@ -1,18 +1,40 @@ from django.contrib import admin + from .models import Token, TokenHistoricalPrice + @admin.register(Token) class TokenAdmin(admin.ModelAdmin): - list_display = ('id', 'decimals', 'get_most_recent_price') - search_fields = ('id',) + list_display = ("id", "decimals", "get_most_recent_price") + search_fields = ("id",) def get_most_recent_price(self, obj): price = obj.get_most_recent_price() return price.price_usd if price else None - get_most_recent_price.short_description = 'Most Recent Price (USD)' + + get_most_recent_price.short_description = "Most Recent Price (USD)" + + def has_add_permission(self, request): + return False + + def has_change_permission(self, request, obj=None): + return False + + def has_delete_permission(self, request, obj=None): + return False + @admin.register(TokenHistoricalPrice) class TokenHistoricalPriceAdmin(admin.ModelAdmin): - list_display = ('token', 'timestamp', 'price_usd') - search_fields = ('token__id',) - list_filter = ('timestamp',) + list_display = ("token", "timestamp", "price_usd") + search_fields = ("token__id",) + list_filter = ("timestamp",) + + def has_add_permission(self, request): + return False + + def has_change_permission(self, request, obj=None): + return False + + def has_delete_permission(self, request, obj=None): + return False From a95d83e67019d69d1584559b7f859a9e0a8feb5c Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Fri, 31 May 2024 18:30:10 -0400 Subject: [PATCH 120/127] add debug logs --- scripts/after_install_dev.sh | 3 +++ 1 file changed, 3 insertions(+) diff --git a/scripts/after_install_dev.sh b/scripts/after_install_dev.sh index 0cc5a0a..9ba2224 100755 --- a/scripts/after_install_dev.sh +++ b/scripts/after_install_dev.sh @@ -11,6 +11,9 @@ echo "=========================================" >> "$LOG_FILE" export PL_ENVIRONMENT=dev source /home/ec2-user/.bashrc +echo "PL_ENVIRONMENT: $PL_ENVIRONMENT" >> "$LOG_FILE" +echo "PL_POSTGRES_HOST: $PL_POSTGRES_HOST" >> "$LOG_FILE" + # Set correct ownership recursively for project directory sudo chown -R ec2-user:nginx /home/ec2-user/django-indexer-dev/ echo "$(date '+%Y-%m-%d %H:%M:%S') - Corrected ownership to ec2-user:nginx" >> "$LOG_FILE" From cd2f0b246646e9c2d0af64d6692144ee2c599612 Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Fri, 31 May 2024 18:31:23 -0400 Subject: [PATCH 121/127] add logs --- scripts/after_install_dev.sh | 3 +++ 1 file changed, 3 insertions(+) diff --git a/scripts/after_install_dev.sh b/scripts/after_install_dev.sh index 9ba2224..e735389 100755 --- a/scripts/after_install_dev.sh +++ b/scripts/after_install_dev.sh @@ -14,6 +14,9 @@ source /home/ec2-user/.bashrc echo "PL_ENVIRONMENT: $PL_ENVIRONMENT" >> "$LOG_FILE" echo "PL_POSTGRES_HOST: $PL_POSTGRES_HOST" >> "$LOG_FILE" +echo "PL_ENVIRONMENT: $PL_ENVIRONMENT" >> "$LOG_FILE" +echo "PL_POSTGRES_HOST: $PL_POSTGRES_HOST" >> "$LOG_FILE" + # Set correct ownership recursively for project directory sudo chown -R ec2-user:nginx /home/ec2-user/django-indexer-dev/ echo "$(date '+%Y-%m-%d 
%H:%M:%S') - Corrected ownership to ec2-user:nginx" >> "$LOG_FILE" From 7d770abb819111502acc4ed9884d4eaf22aa70a7 Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Fri, 31 May 2024 18:40:55 -0400 Subject: [PATCH 122/127] fix appspec destination for dev --- appspec-dev.yml | 2 +- scripts/after_install_dev.sh | 6 ------ 2 files changed, 1 insertion(+), 7 deletions(-) diff --git a/appspec-dev.yml b/appspec-dev.yml index 192e4cf..6cb73af 100644 --- a/appspec-dev.yml +++ b/appspec-dev.yml @@ -2,7 +2,7 @@ version: 0.0 os: linux files: - source: / - destination: /home/ec2-user/django-indexer + destination: /home/ec2-user/django-indexer-dev hooks: # # Install: AfterInstall: diff --git a/scripts/after_install_dev.sh b/scripts/after_install_dev.sh index e735389..0cc5a0a 100755 --- a/scripts/after_install_dev.sh +++ b/scripts/after_install_dev.sh @@ -11,12 +11,6 @@ echo "=========================================" >> "$LOG_FILE" export PL_ENVIRONMENT=dev source /home/ec2-user/.bashrc -echo "PL_ENVIRONMENT: $PL_ENVIRONMENT" >> "$LOG_FILE" -echo "PL_POSTGRES_HOST: $PL_POSTGRES_HOST" >> "$LOG_FILE" - -echo "PL_ENVIRONMENT: $PL_ENVIRONMENT" >> "$LOG_FILE" -echo "PL_POSTGRES_HOST: $PL_POSTGRES_HOST" >> "$LOG_FILE" - # Set correct ownership recursively for project directory sudo chown -R ec2-user:nginx /home/ec2-user/django-indexer-dev/ echo "$(date '+%Y-%m-%d %H:%M:%S') - Corrected ownership to ec2-user:nginx" >> "$LOG_FILE" From 1d81df12587bbddb9a5cc3273c60597b12922452 Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Fri, 31 May 2024 18:47:10 -0400 Subject: [PATCH 123/127] clean up destination directory before installing in deployment (only dev) --- appspec-dev.yml | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/appspec-dev.yml b/appspec-dev.yml index 6cb73af..02a6c66 100644 --- a/appspec-dev.yml +++ b/appspec-dev.yml @@ -4,7 +4,10 @@ files: - source: / destination: /home/ec2-user/django-indexer-dev hooks: - # # Install: + BeforeInstall: + - location: "echo 'Cleaning up destination directory' && rm -rf /home/ec2-user/django-indexer-dev/*" + timeout: 300 + runas: ec2-user AfterInstall: - location: scripts/after_install_dev.sh timeout: 300 From 41217edbfcfd2d8cf6a8f224a363293c72bd6387 Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Fri, 31 May 2024 18:50:52 -0400 Subject: [PATCH 124/127] add clean_destination scripts --- appspec-dev.yml | 2 +- appspec-testnet.yml | 4 ++++ scripts/clean_destination_dev.sh | 8 ++++++++ scripts/clean_destination_testnet.sh | 8 ++++++++ 4 files changed, 21 insertions(+), 1 deletion(-) create mode 100644 scripts/clean_destination_dev.sh create mode 100644 scripts/clean_destination_testnet.sh diff --git a/appspec-dev.yml b/appspec-dev.yml index 02a6c66..f83a7fc 100644 --- a/appspec-dev.yml +++ b/appspec-dev.yml @@ -5,7 +5,7 @@ files: destination: /home/ec2-user/django-indexer-dev hooks: BeforeInstall: - - location: "echo 'Cleaning up destination directory' && rm -rf /home/ec2-user/django-indexer-dev/*" + - location: scripts/clean_destination_dev.sh timeout: 300 runas: ec2-user AfterInstall: diff --git a/appspec-testnet.yml b/appspec-testnet.yml index b006f0e..3ed0e62 100644 --- a/appspec-testnet.yml +++ b/appspec-testnet.yml @@ -5,6 +5,10 @@ files: destination: /home/ec2-user/django-indexer-testnet hooks: # # Install: + BeforeInstall: + - location: scripts/clean_destination_testnet.sh + timeout: 300 + runas: 
ec2-user
   AfterInstall:
     - location: scripts/after_install_testnet.sh
       timeout: 300
diff --git a/scripts/clean_destination_dev.sh b/scripts/clean_destination_dev.sh
new file mode 100644
index 0000000..638c035
--- /dev/null
+++ b/scripts/clean_destination_dev.sh
@@ -0,0 +1,8 @@
+#!/bin/bash
+# Directory to clean
+DEST_DIR="/home/ec2-user/django-indexer-dev"
+
+# Delete all contents of the destination directory (glob must stay outside the quotes to expand)
+if [ -d "$DEST_DIR" ]; then
+    rm -rf "${DEST_DIR:?}"/*
+fi
\ No newline at end of file
diff --git a/scripts/clean_destination_testnet.sh b/scripts/clean_destination_testnet.sh
new file mode 100644
index 0000000..62b020a
--- /dev/null
+++ b/scripts/clean_destination_testnet.sh
@@ -0,0 +1,8 @@
+#!/bin/bash
+# Directory to clean
+DEST_DIR="/home/ec2-user/django-indexer-testnet"
+
+# Delete all contents of the destination directory (glob must stay outside the quotes to expand)
+if [ -d "$DEST_DIR" ]; then
+    rm -rf "${DEST_DIR:?}"/*
+fi
\ No newline at end of file

From 0bf1a26a9313c8eafb015c1480938d635e41359d Mon Sep 17 00:00:00 2001
From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com>
Date: Sat, 1 Jun 2024 11:15:38 -0400
Subject: [PATCH 125/127] remove BeforeInstall clean destination step

---
 appspec-dev.yml     | 8 ++++----
 appspec-testnet.yml | 8 ++++----
 2 files changed, 8 insertions(+), 8 deletions(-)

diff --git a/appspec-dev.yml b/appspec-dev.yml
index f83a7fc..348d01b 100644
--- a/appspec-dev.yml
+++ b/appspec-dev.yml
@@ -4,10 +4,10 @@ files:
   - source: /
     destination: /home/ec2-user/django-indexer-dev
 hooks:
-  BeforeInstall:
-    - location: scripts/clean_destination_dev.sh
-      timeout: 300
-      runas: ec2-user
+  # BeforeInstall:
+  #   - location: scripts/clean_destination_dev.sh
+  #     timeout: 300
+  #     runas: ec2-user
   AfterInstall:
     - location: scripts/after_install_dev.sh
       timeout: 300
diff --git a/appspec-testnet.yml b/appspec-testnet.yml
index 3ed0e62..dafa210 100644
--- a/appspec-testnet.yml
+++ b/appspec-testnet.yml
@@ -5,10 +5,10 @@ files:
     destination: /home/ec2-user/django-indexer-testnet
 hooks:
   # # Install:
-  BeforeInstall:
-    - location: scripts/clean_destination_testnet.sh
-      timeout: 300
-      runas: ec2-user
+  # BeforeInstall:
+  #   - location: scripts/clean_destination_testnet.sh
+  #     timeout: 300
+  #     runas: ec2-user
   AfterInstall:
     - location: scripts/after_install_testnet.sh
       timeout: 300

From 224125b4d33c78f887a1c03eb186aa8b0d1f8d7c Mon Sep 17 00:00:00 2001
From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com>
Date: Tue, 4 Jun 2024 10:41:54 -0400
Subject: [PATCH 126/127] Testnet (#27)

* add deploy-testnet.yml
* add testnet deployment workflow & appspec
* testnet workflow debugging
* upload appspec as artifact
* upload zipped codebase to s3 in workflow
* continue working on after_install_testnet
* separate watchtower log groups by django env
* add testnet potlock TLA to indexer handler
* add POTLOCK_TLA to settings
* add dev.potlock.io and test-dev.potlock.io to allowed hosts
* remove admin dashboard edit access
* remove admin edit permission for all models
* add timestamp to deployment zip filename
* add env vars to deploy-testnet
* add after_install_dev script
* add after_install debug logs
* handle testnet potfactory & pot contract patterns
* test throttle rate
* update throttling to 100/minute
---
 base/settings.py             | 68 +++++----------------
 indexer_app/handler.py       | 11 ++----
 pots/utils.py                | 12 ++++--
 scripts/after_install_dev.sh | 73 ------------------------------
 4 files changed, 22 insertions(+), 142 deletions(-)

diff --git a/base/settings.py b/base/settings.py
index 7525f40..5125bbf 100644
--- 
a/base/settings.py +++ b/base/settings.py @@ -97,6 +97,14 @@ REST_FRAMEWORK = { "DEFAULT_PAGINATION_CLASS": "rest_framework.pagination.LimitOffsetPagination", "PAGE_SIZE": DEFAULT_PAGE_SIZE, + "DEFAULT_THROTTLE_CLASSES": [ + # "rest_framework.throttling.UserRateThrottle", + "rest_framework.throttling.AnonRateThrottle", + ], + "DEFAULT_THROTTLE_RATES": { + # "user": "100/day", + "anon": "100/minute", + }, } @@ -273,64 +281,8 @@ LOGGING["loggers"]["indexer"]["handlers"].append("watchtower") LOGGING["loggers"]["jobs"]["handlers"].append("watchtower") -# log_level = getattr(logging, LOG_LEVEL, logging.INFO) -# print("LOG_LEVEL: ", LOG_LEVEL) -# # print("log_level: ", log_level) - -# if ENVIRONMENT != "local": -# AWS_REGION_NAME = "us-east-1" -# boto3_logs_client = boto3.client("logs", region_name=AWS_REGION_NAME) - - -# LOGGING = { -# "version": 1, -# "disable_existing_loggers": False, -# "root": { -# "level": log_level, -# # Adding the watchtower handler here causes all loggers in the project that -# # have propagate=True (the default) to send messages to watchtower. If you -# # wish to send only from specific loggers instead, remove "watchtower" here -# # and configure individual loggers below. -# # "handlers": ["watchtower", "console"], -# "handlers": ["console"], -# }, -# "handlers": { -# "console": { -# "class": "logging.StreamHandler", -# }, -# # "watchtower": { -# # "class": "watchtower.CloudWatchLogHandler", -# # "boto3_client": boto3_logs_client, -# # "log_group_name": "django-indexer", -# # # Decrease the verbosity level here to send only those logs to watchtower, -# # # but still see more verbose logs in the console. See the watchtower -# # # documentation for other parameters that can be set here. -# # "level": log_level, -# # }, -# }, -# "loggers": { -# # In the debug server (`manage.py runserver`), several Django system loggers cause -# # deadlocks when using threading in the logging handler, and are not supported by -# # watchtower. This limitation does not apply when running on production WSGI servers -# # (gunicorn, uwsgi, etc.), so we recommend that you set `propagate=True` below in your -# # production-specific Django settings file to receive Django system logs in CloudWatch. -# "django": {"level": log_level, "handlers": ["console"], "propagate": False} -# # Add any other logger-specific configuration here. -# }, -# } - -# if ENVIRONMENT != "local": -# LOGGING["handlers"]["watchtower"] = { -# "class": "watchtower.CloudWatchLogHandler", -# "boto3_client": boto3_logs_client, -# "log_group_name": "django-indexer", -# # Decrease the verbosity level here to send only those logs to watchtower, -# # but still see more verbose logs in the console. See the watchtower -# # documentation for other parameters that can be set here. 
-# "level": log_level, -# } - -# LOGGING["root"]["handlers"].append("watchtower") + +## SENTRY CONFIG sentry_sdk.init( environment=ENVIRONMENT, diff --git a/indexer_app/handler.py b/indexer_app/handler.py index 0cdfa99..1981fb3 100644 --- a/indexer_app/handler.py +++ b/indexer_app/handler.py @@ -7,10 +7,7 @@ from near_lake_framework import near_primitives from base.utils import convert_ns_to_utc -from pots.utils import ( - match_pot_factory_version_pattern, - match_pot_subaccount_version_pattern, -) +from pots.utils import match_pot_factory_pattern, match_pot_subaccount_pattern from .logging import logger from .utils import ( @@ -129,14 +126,12 @@ async def handle_streamer_message(streamer_message: near_primitives.StreamerMess match method_name: case "new": - if match_pot_factory_version_pattern(receipt.receiver_id): + if match_pot_factory_pattern(receipt.receiver_id): logger.info(f"matched for factory pattern: {args_dict}") await handle_new_pot_factory( args_dict, receiver_id, created_at ) - elif match_pot_subaccount_version_pattern( - receipt.receiver_id - ): + elif match_pot_subaccount_pattern(receipt.receiver_id): logger.info( f"new pot deployment: {args_dict}, {action}" ) diff --git a/pots/utils.py b/pots/utils.py index 1a5f991..0d98146 100644 --- a/pots/utils.py +++ b/pots/utils.py @@ -1,15 +1,21 @@ import re -BASE_PATTERN = r"v\d+\.potfactory\.potlock\.near$" +from django.conf import settings +BASE_PATTERN = ( + r"^potlock\.testnet$" + if settings.ENVIRONMENT == "testnet" + else r"v\d+\.potfactory\.potlock\.near$" +) -def match_pot_factory_version_pattern(receiver): + +def match_pot_factory_pattern(receiver): """Matches the base pot factory version pattern without a subaccount. NB: does not currently handle testnet factory.""" pattern = f"^{BASE_PATTERN}" return bool(re.match(pattern, receiver)) -def match_pot_subaccount_version_pattern(receiver): +def match_pot_subaccount_pattern(receiver): """Matches the pot factory version pattern with a subaccount. 
NB: does not currently handle testnet factory.""" pattern = f"^[a-zA-Z0-9_]+\.{BASE_PATTERN}" return bool(re.match(pattern, receiver)) diff --git a/scripts/after_install_dev.sh b/scripts/after_install_dev.sh index 0cc5a0a..d9b54f1 100755 --- a/scripts/after_install_dev.sh +++ b/scripts/after_install_dev.sh @@ -65,76 +65,3 @@ else fi echo "$(date '+%Y-%m-%d %H:%M:%S') - after_install_dev.sh completed" >> "$LOG_FILE" - - - - -# #!/bin/bash -# # Log output to a specific file -# LOG_FILE="/home/ec2-user/django-indexer-dev/logs/deploy.log" - -# # print placeholder -# echo -e "\n THIS IS A PLACEHOLDER \n" >> "$LOG_FILE" - -# echo -e "\n\n" >> "$LOG_FILE" -# echo "=========================================" >> "$LOG_FILE" -# echo "Running after_install_dev.sh at $(date '+%Y-%m-%d %H:%M:%S')" >> "$LOG_FILE" -# echo "=========================================" >> "$LOG_FILE" - -# # Load env vars -# source /home/ec2-user/.bashrc - -# # Set correct ownership recursively for project directory -# sudo chown -R ec2-user:nginx /home/ec2-user/django-indexer/ -# echo "$(date '+%Y-%m-%d %H:%M:%S') - Corrected ownership to ec2-user:nginx" >> "$LOG_FILE" - -# # Set the necessary permissions -# sudo chmod -R 775 /home/ec2-user/django-indexer/ -# echo "$(date '+%Y-%m-%d %H:%M:%S') - Set permissions to 775" >> "$LOG_FILE" - -# # Restart nginx to apply any configuration changes -# sudo systemctl restart nginx -# echo "$(date '+%Y-%m-%d %H:%M:%S') - Restarted nginx" >> "$LOG_FILE" - -# # Define the project directory -# PROJECT_DIR="/home/ec2-user/django-indexer" - -# # Navigate to the project directory -# cd "$PROJECT_DIR" - -# # Source the specific poetry virtual environment -# source "/home/ec2-user/.cache/pypoetry/virtualenvs/django-indexer-Y-SQFfhb-py3.11/bin/activate" # TODO: UPDATE THIS - -# # Install dependencies using Poetry -# echo "$(date '+%Y-%m-%d %H:%M:%S') - Installing dependencies with Poetry" >> "$LOG_FILE" -# poetry install >> "$LOG_FILE" -# echo "$(date '+%Y-%m-%d %H:%M:%S') - Dependencies installed" >> "$LOG_FILE" - -# # Check if there are pending migrations and log the output -# echo "Checking for pending migrations..." >> "$LOG_FILE" -# PENDING_MIGRATIONS=$(poetry run python manage.py showmigrations | grep '\[ \]' 2>&1) # Redirect stderr to stdout -# echo "Migration check output: $PENDING_MIGRATIONS" >> "$LOG_FILE" - -# # Log the full output of showmigrations -# echo "Checking for pending migrations..." >> "$LOG_FILE" -# poetry run python manage.py showmigrations >> "$LOG_FILE" 2>&1 # Logging full output to diagnose - -# # Check for unapplied migrations -# PENDING_MIGRATIONS=$(poetry run python manage.py showmigrations | grep "\[ \]" | wc -l) # Count unapplied migrations - -# if [ "$PENDING_MIGRATIONS" -gt 0 ]; then -# echo "Migrations found; stopping services..." >> "$LOG_FILE" -# sudo systemctl stop gunicorn celery-indexer-worker celery-beat-worker celery-beat - -# echo 'Applying migrations...' >> "$LOG_FILE" -# poetry run python manage.py migrate >> "$LOG_FILE" 2>&1 - -# echo 'Starting services...' >> "$LOG_FILE" -# sudo systemctl start gunicorn celery-indexer-worker celery-beat-worker celery-beat -# else -# echo 'No migrations found. Running collectstatic and restarting services...' 
>> "$LOG_FILE" -# poetry run python manage.py collectstatic --noinput >> "$LOG_FILE" 2>&1 -# sudo systemctl restart gunicorn celery-indexer-worker celery-beat-worker celery-beat -# fi - -# echo "$(date '+%Y-%m-%d %H:%M:%S') - after_install_dev.sh completed" >> "$LOG_FILE" From 2e159f95d120532d712cddf45ce5835af0101272 Mon Sep 17 00:00:00 2001 From: Lachlan Glen <54282009+lachlanglen@users.noreply.github.com> Date: Tue, 4 Jun 2024 10:47:38 -0400 Subject: [PATCH 127/127] Testnet (#28) * add deploy-testnet.yml * add testnet deployment workflow & appspec * testnet workflow debugging * upload appspec as artifact * upload zipped codebase to s3 in workflow * continue working on after_install_testnet * separate watchtower log groups by django env * add testnet potlock TLA to indexer handler * add POTLOCK_TLA to settings * add dev.potlock.io and test-dev.potlock.io to allowed hosts * remove admin dashboard edit access * remove admin edit permission for all models * add timestamp to deployment zip filename * add env vars to deploy-testnet * add after_install_dev script * add after_install debug logs * handle testnet potfactory & pot contract patterns * test throttle rate * update throttling to 100/minute * update readme with API urls --- README.md | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/README.md b/README.md index e04c4f7..fc89206 100644 --- a/README.md +++ b/README.md @@ -3,10 +3,10 @@ - [Steps to run:](#steps-to-run) - [Env vars example](#env-vars-example) - [API Basics](#api-basics) - - [Base URL](#base-url) - - [Authorization](#authorization) - - [Error Responses](#error-responses) - - [Pagination](#pagination) + - [Base URL](#base-url) + - [Authorization](#authorization) + - [Error Responses](#error-responses) + - [Pagination](#pagination) - [API Endpoints](#api-endpoints) - [`Account` endpoints](#account-endpoints) - [✅ Get all accounts: `GET /accounts` (paginated)](#-get-all-accounts-get-accounts-paginated) @@ -52,6 +52,7 @@ - If for some reason this doesn't kill any active celery tasks, run `ps auxww | grep 'celery' | grep -v grep` and kill resulting PIDs Extra commands that might come in useful: + - Purge celery queue (`celery -A base purge`) ### Env vars example @@ -77,13 +78,14 @@ export PL_SENTRY_DSN= #### Base URL -`/api/v1/` +**dev (mainnet):** `https://dev.potlock.io/api/v1/` +**testnet:** `https://test-dev.potlock.io/api/v1/` #### Authorization This is a public, read-only API and as such does not currently implement authentication or authorization. -Rate limits of (FILL THIS IN) are enforced to ensure service for all users. +Rate limits of 100 requests/min are enforced to ensure service for all users. #### Error Responses