Merge branch 'atari' into fc.new_server

commit 8711b741f4 (GPG Key ID: 4AEE18F83AFDEB23)
Author: Florin Chirica, 2022-03-24 13:25:34 +02:00 (committed by GitHub)
224 changed files with 5259 additions and 2516 deletions


@ -137,7 +137,10 @@ jobs:
CHIA_INSTALLER_VERSION: ${{ steps.version_number.outputs.CHIA_INSTALLER_VERSION }}
if: steps.check_secrets.outputs.HAS_SECRET
run: |
aws s3 cp "$GITHUB_WORKSPACE/build_scripts/final_installer/chia-blockchain_${CHIA_INSTALLER_VERSION}_arm64.deb" s3://download-chia-net/builds/
GIT_SHORT_HASH=$(echo "${GITHUB_SHA}" | cut -c1-8)
CHIA_DEV_BUILD=${CHIA_INSTALLER_VERSION}-$GIT_SHORT_HASH
echo "CHIA_DEV_BUILD=$CHIA_DEV_BUILD" >>$GITHUB_ENV
aws s3 cp "$GITHUB_WORKSPACE/build_scripts/final_installer/chia-blockchain_${CHIA_INSTALLER_VERSION}_arm64.deb" "s3://download.chia.net/dev/chia-blockchain_${CHIA_DEV_BUILD}_arm64.deb"
- name: Create Checksums
if: startsWith(github.ref, 'refs/tags/') || github.ref == 'refs/heads/main'
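Aside: the dev-build object key introduced above is simply the installer version plus the first eight hex characters of the commit SHA. A minimal sketch of the naming scheme, using hypothetical values:
GITHUB_SHA="8711b741f40123456789abcdef0123456789abcd"      # hypothetical
CHIA_INSTALLER_VERSION="1.3.0"                             # hypothetical
GIT_SHORT_HASH=$(echo "${GITHUB_SHA}" | cut -c1-8)         # -> 8711b741
CHIA_DEV_BUILD=${CHIA_INSTALLER_VERSION}-$GIT_SHORT_HASH   # -> 1.3.0-8711b741
# the upload would land at s3://download.chia.net/dev/chia-blockchain_1.3.0-8711b741_arm64.deb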
@ -158,7 +161,7 @@ jobs:
env:
CHIA_INSTALLER_VERSION: ${{ steps.version_number.outputs.CHIA_INSTALLER_VERSION }}
run: |
py3createtorrent -f -t udp://tracker.opentrackr.org:1337/announce $GITHUB_WORKSPACE/build_scripts/final_installer/chia-blockchain_${CHIA_INSTALLER_VERSION}_arm64.deb -o $GITHUB_WORKSPACE/build_scripts/final_installer/chia-blockchain_${CHIA_INSTALLER_VERSION}_arm64.deb.torrent --webseed https://download-chia-net.s3.us-west-2.amazonaws.com/install/chia-blockchain_${CHIA_INSTALLER_VERSION}_arm64.deb
py3createtorrent -f -t udp://tracker.opentrackr.org:1337/announce $GITHUB_WORKSPACE/build_scripts/final_installer/chia-blockchain_${CHIA_INSTALLER_VERSION}_arm64.deb -o $GITHUB_WORKSPACE/build_scripts/final_installer/chia-blockchain_${CHIA_INSTALLER_VERSION}_arm64.deb.torrent --webseed https://download.chia.net/install/chia-blockchain_${CHIA_INSTALLER_VERSION}_arm64.deb
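# --webseed embeds an HTTP seed (BEP 19) in the torrent, so clients can fall back to the download.chia.net mirror even when no peers are seeding.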
ls $GITHUB_WORKSPACE/build_scripts/final_installer/
- name: Upload Beta Installer
@ -166,8 +169,8 @@ jobs:
env:
CHIA_INSTALLER_VERSION: ${{ steps.version_number.outputs.CHIA_INSTALLER_VERSION }}
run: |
aws s3 cp $GITHUB_WORKSPACE/build_scripts/final_installer/chia-blockchain_${CHIA_INSTALLER_VERSION}_arm64.deb s3://download-chia-net/beta/chia-blockchain_arm64_latest_beta.deb
aws s3 cp $GITHUB_WORKSPACE/build_scripts/final_installer/chia-blockchain_${CHIA_INSTALLER_VERSION}_arm64.deb.sha256 s3://download-chia-net/beta/chia-blockchain_arm64_latest_beta.deb.sha256
aws s3 cp $GITHUB_WORKSPACE/build_scripts/final_installer/chia-blockchain_${CHIA_INSTALLER_VERSION}_arm64.deb s3://download.chia.net/beta/chia-blockchain_arm64_latest_beta.deb
aws s3 cp $GITHUB_WORKSPACE/build_scripts/final_installer/chia-blockchain_${CHIA_INSTALLER_VERSION}_arm64.deb.sha256 s3://download.chia.net/beta/chia-blockchain_arm64_latest_beta.deb.sha256
- name: Upload Release Files
if: steps.check_secrets.outputs.HAS_SECRET && startsWith(github.ref, 'refs/tags/')
@ -175,9 +178,9 @@ jobs:
CHIA_INSTALLER_VERSION: ${{ steps.version_number.outputs.CHIA_INSTALLER_VERSION }}
run: |
ls $GITHUB_WORKSPACE/build_scripts/final_installer/
aws s3 cp $GITHUB_WORKSPACE/build_scripts/final_installer/chia-blockchain_${CHIA_INSTALLER_VERSION}_arm64.deb s3://download-chia-net/install/
aws s3 cp $GITHUB_WORKSPACE/build_scripts/final_installer/chia-blockchain_${CHIA_INSTALLER_VERSION}_arm64.deb.sha256 s3://download-chia-net/install/
aws s3 cp $GITHUB_WORKSPACE/build_scripts/final_installer/chia-blockchain_${CHIA_INSTALLER_VERSION}_arm64.deb.torrent s3://download-chia-net/torrents/
aws s3 cp $GITHUB_WORKSPACE/build_scripts/final_installer/chia-blockchain_${CHIA_INSTALLER_VERSION}_arm64.deb s3://download.chia.net/install/
aws s3 cp $GITHUB_WORKSPACE/build_scripts/final_installer/chia-blockchain_${CHIA_INSTALLER_VERSION}_arm64.deb.sha256 s3://download.chia.net/install/
aws s3 cp $GITHUB_WORKSPACE/build_scripts/final_installer/chia-blockchain_${CHIA_INSTALLER_VERSION}_arm64.deb.torrent s3://download.chia.net/torrents/
- name: Get tag name
if: startsWith(github.ref, 'refs/tags/')


@ -177,8 +177,11 @@ jobs:
env:
CHIA_INSTALLER_VERSION: ${{ steps.version_number.outputs.CHIA_INSTALLER_VERSION }}
run: |
GIT_SHORT_HASH=$(echo "${GITHUB_SHA}" | cut -c1-8)
CHIA_DEV_BUILD=${CHIA_INSTALLER_VERSION}-$GIT_SHORT_HASH
echo "CHIA_DEV_BUILD=$CHIA_DEV_BUILD" >>$GITHUB_ENV
ls ${{ github.workspace }}/build_scripts/final_installer/
aws s3 cp ${{ github.workspace }}/build_scripts/final_installer/chia-blockchain_${CHIA_INSTALLER_VERSION}_amd64.deb s3://download-chia-net/builds/
aws s3 cp ${{ github.workspace }}/build_scripts/final_installer/chia-blockchain_${CHIA_INSTALLER_VERSION}_amd64.deb s3://download.chia.net/dev/chia-blockchain_${CHIA_DEV_BUILD}_amd64.deb
- name: Create Checksums
if: startsWith(github.ref, 'refs/tags/') || github.ref == 'refs/heads/main'
@ -199,7 +202,7 @@ jobs:
CHIA_INSTALLER_VERSION: ${{ steps.version_number.outputs.CHIA_INSTALLER_VERSION }}
if: startsWith(github.ref, 'refs/tags/')
run: |
py3createtorrent -f -t udp://tracker.opentrackr.org:1337/announce ${{ github.workspace }}/build_scripts/final_installer/chia-blockchain_${CHIA_INSTALLER_VERSION}_amd64.deb -o ${{ github.workspace }}/build_scripts/final_installer/chia-blockchain_${CHIA_INSTALLER_VERSION}_amd64.deb.torrent --webseed https://download-chia-net.s3.us-west-2.amazonaws.com/install/chia-blockchain_${CHIA_INSTALLER_VERSION}_amd64.deb
py3createtorrent -f -t udp://tracker.opentrackr.org:1337/announce ${{ github.workspace }}/build_scripts/final_installer/chia-blockchain_${CHIA_INSTALLER_VERSION}_amd64.deb -o ${{ github.workspace }}/build_scripts/final_installer/chia-blockchain_${CHIA_INSTALLER_VERSION}_amd64.deb.torrent --webseed https://download.chia.net/install/chia-blockchain_${CHIA_INSTALLER_VERSION}_amd64.deb
ls
- name: Upload Beta Installer
@ -207,17 +210,17 @@ jobs:
env:
CHIA_INSTALLER_VERSION: ${{ steps.version_number.outputs.CHIA_INSTALLER_VERSION }}
run: |
aws s3 cp ${{ github.workspace }}/build_scripts/final_installer/chia-blockchain_${CHIA_INSTALLER_VERSION}_amd64.deb s3://download-chia-net/beta/chia-blockchain_amd64_latest_beta.deb
aws s3 cp ${{ github.workspace }}/build_scripts/final_installer/chia-blockchain_${CHIA_INSTALLER_VERSION}_amd64.deb.sha256 s3://download-chia-net/beta/chia-blockchain_amd64_latest_beta.deb.sha256
aws s3 cp ${{ github.workspace }}/build_scripts/final_installer/chia-blockchain_${CHIA_INSTALLER_VERSION}_amd64.deb s3://download.chia.net/beta/chia-blockchain_amd64_latest_beta.deb
aws s3 cp ${{ github.workspace }}/build_scripts/final_installer/chia-blockchain_${CHIA_INSTALLER_VERSION}_amd64.deb.sha256 s3://download.chia.net/beta/chia-blockchain_amd64_latest_beta.deb.sha256
- name: Upload Release Files
env:
CHIA_INSTALLER_VERSION: ${{ steps.version_number.outputs.CHIA_INSTALLER_VERSION }}
if: steps.check_secrets.outputs.HAS_SECRET && startsWith(github.ref, 'refs/tags/')
run: |
aws s3 cp ${{ github.workspace }}/build_scripts/final_installer/chia-blockchain_${CHIA_INSTALLER_VERSION}_amd64.deb s3://download-chia-net/install/
aws s3 cp ${{ github.workspace }}/build_scripts/final_installer/chia-blockchain_${CHIA_INSTALLER_VERSION}_amd64.deb.sha256 s3://download-chia-net/install/
aws s3 cp ${{ github.workspace }}/build_scripts/final_installer/chia-blockchain_${CHIA_INSTALLER_VERSION}_amd64.deb.torrent s3://download-chia-net/torrents/
aws s3 cp ${{ github.workspace }}/build_scripts/final_installer/chia-blockchain_${CHIA_INSTALLER_VERSION}_amd64.deb s3://download.chia.net/install/
aws s3 cp ${{ github.workspace }}/build_scripts/final_installer/chia-blockchain_${CHIA_INSTALLER_VERSION}_amd64.deb.sha256 s3://download.chia.net/install/
aws s3 cp ${{ github.workspace }}/build_scripts/final_installer/chia-blockchain_${CHIA_INSTALLER_VERSION}_amd64.deb.torrent s3://download.chia.net/torrents/
- name: Get tag name
if: startsWith(github.ref, 'refs/tags/')


@ -137,8 +137,11 @@ jobs:
env:
CHIA_INSTALLER_VERSION: ${{ steps.version_number.outputs.CHIA_INSTALLER_VERSION }}
run: |
GIT_SHORT_HASH=$(echo "${GITHUB_SHA}" | cut -c1-8)
CHIA_DEV_BUILD=${CHIA_INSTALLER_VERSION}-$GIT_SHORT_HASH
echo "CHIA_DEV_BUILD=$CHIA_DEV_BUILD" >>$GITHUB_ENV
ls $GITHUB_WORKSPACE/build_scripts/final_installer/
aws s3 cp $GITHUB_WORKSPACE/build_scripts/final_installer/chia-blockchain-${CHIA_INSTALLER_VERSION}-1.x86_64.rpm s3://download-chia-net/builds/
aws s3 cp $GITHUB_WORKSPACE/build_scripts/final_installer/chia-blockchain-${CHIA_INSTALLER_VERSION}-1.x86_64.rpm s3://download.chia.net/dev/chia-blockchain-${CHIA_DEV_BUILD}-1.x86_64.rpm
- name: Create Checksums
if: startsWith(github.ref, 'refs/tags/') || github.ref == 'refs/heads/main'
@ -159,7 +162,7 @@ jobs:
env:
CHIA_INSTALLER_VERSION: ${{ steps.version_number.outputs.CHIA_INSTALLER_VERSION }}
run: |
py3createtorrent -f -t udp://tracker.opentrackr.org:1337/announce $GITHUB_WORKSPACE/build_scripts/final_installer/chia-blockchain-${CHIA_INSTALLER_VERSION}-1.x86_64.rpm -o $GITHUB_WORKSPACE/build_scripts/final_installer/chia-blockchain-${CHIA_INSTALLER_VERSION}-1.x86_64.rpm.torrent --webseed https://download-chia-net.s3.us-west-2.amazonaws.com/install/chia-blockchain-${CHIA_INSTALLER_VERSION}-1.x86_64.rpm
py3createtorrent -f -t udp://tracker.opentrackr.org:1337/announce $GITHUB_WORKSPACE/build_scripts/final_installer/chia-blockchain-${CHIA_INSTALLER_VERSION}-1.x86_64.rpm -o $GITHUB_WORKSPACE/build_scripts/final_installer/chia-blockchain-${CHIA_INSTALLER_VERSION}-1.x86_64.rpm.torrent --webseed https://download.chia.net/install/chia-blockchain-${CHIA_INSTALLER_VERSION}-1.x86_64.rpm
ls
- name: Upload Beta Installer
@ -167,17 +170,17 @@ jobs:
env:
CHIA_INSTALLER_VERSION: ${{ steps.version_number.outputs.CHIA_INSTALLER_VERSION }}
run: |
aws s3 cp $GITHUB_WORKSPACE/build_scripts/final_installer/chia-blockchain-${CHIA_INSTALLER_VERSION}-1.x86_64.rpm s3://download-chia-net/beta/chia-blockchain-1.x86_64_latest_beta.rpm
aws s3 cp $GITHUB_WORKSPACE/build_scripts/final_installer/chia-blockchain-${CHIA_INSTALLER_VERSION}-1.x86_64.rpm.sha256 s3://download-chia-net/beta/chia-blockchain-1.x86_64_latest_beta.rpm.sha256
aws s3 cp $GITHUB_WORKSPACE/build_scripts/final_installer/chia-blockchain-${CHIA_INSTALLER_VERSION}-1.x86_64.rpm s3://download.chia.net/beta/chia-blockchain-1.x86_64_latest_beta.rpm
aws s3 cp $GITHUB_WORKSPACE/build_scripts/final_installer/chia-blockchain-${CHIA_INSTALLER_VERSION}-1.x86_64.rpm.sha256 s3://download.chia.net/beta/chia-blockchain-1.x86_64_latest_beta.rpm.sha256
- name: Upload Release Files
if: steps.check_secrets.outputs.HAS_SECRET && startsWith(github.ref, 'refs/tags/')
env:
CHIA_INSTALLER_VERSION: ${{ steps.version_number.outputs.CHIA_INSTALLER_VERSION }}
run: |
aws s3 cp $GITHUB_WORKSPACE/build_scripts/final_installer/chia-blockchain-${CHIA_INSTALLER_VERSION}-1.x86_64.rpm s3://download-chia-net/install/
aws s3 cp $GITHUB_WORKSPACE/build_scripts/final_installer/chia-blockchain-${CHIA_INSTALLER_VERSION}-1.x86_64.rpm.sha256 s3://download-chia-net/install/
aws s3 cp $GITHUB_WORKSPACE/build_scripts/final_installer/chia-blockchain-${CHIA_INSTALLER_VERSION}-1.x86_64.rpm.torrent s3://download-chia-net/torrents/
aws s3 cp $GITHUB_WORKSPACE/build_scripts/final_installer/chia-blockchain-${CHIA_INSTALLER_VERSION}-1.x86_64.rpm s3://download.chia.net/install/
aws s3 cp $GITHUB_WORKSPACE/build_scripts/final_installer/chia-blockchain-${CHIA_INSTALLER_VERSION}-1.x86_64.rpm.sha256 s3://download.chia.net/install/
aws s3 cp $GITHUB_WORKSPACE/build_scripts/final_installer/chia-blockchain-${CHIA_INSTALLER_VERSION}-1.x86_64.rpm.torrent s3://download.chia.net/torrents/
- name: Get tag name
if: startsWith(github.ref, 'refs/tags/')


@ -1,4 +1,4 @@
name: MacOS installer on Catalina and Python 3.8
name: MacOS Intel installer on Python 3.9
on:
push:
@ -17,7 +17,7 @@ concurrency:
jobs:
build:
name: MacOS installer on Catalina and Python 3.8
name: MacOS Intel Installer on Python 3.9
runs-on: ${{ matrix.os }}
timeout-minutes: 40
strategy:
@ -39,6 +39,31 @@ jobs:
- name: Cleanup any leftovers that exist from previous runs
run: bash build_scripts/clean-runner.sh || true
- name: Test for secrets access
id: check_secrets
shell: bash
run: |
unset HAS_APPLE_SECRET
unset HAS_AWS_SECRET
if [ -n "$APPLE_SECRET" ]; then HAS_APPLE_SECRET='true' ; fi
echo ::set-output name=HAS_APPLE_SECRET::${HAS_APPLE_SECRET}
if [ -n "$AWS_SECRET" ]; then HAS_AWS_SECRET='true' ; fi
echo ::set-output name=HAS_AWS_SECRET::${HAS_AWS_SECRET}
env:
APPLE_SECRET: "${{ secrets.APPLE_DEV_ID_APP }}"
AWS_SECRET: "${{ secrets.INSTALLER_UPLOAD_KEY }}"
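# GitHub does not expose repository secrets to runs triggered from forked PRs, so these flags let later signing/upload steps be skipped cleanly instead of failing.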
- name: Create installer version number
id: version_number
run: |
python3 -m venv ../venv
. ../venv/bin/activate
pip install setuptools_scm
echo "::set-output name=CHIA_INSTALLER_VERSION::$(python3 ./build_scripts/installer-version.py)"
deactivate
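# installer-version.py relies on setuptools_scm (installed just above), so the version number presumably reflects the nearest git tag plus any dev-commit distance.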
- name: Setup Python environment
uses: actions/setup-python@v2
with:
@ -71,18 +96,8 @@ jobs:
restore-keys: |
${{ runner.os }}-pip-
- name: Test for secrets access
id: check_secrets
shell: bash
run: |
unset HAS_SECRET
if [ -n "$SECRET" ]; then HAS_SECRET='true' ; fi
echo ::set-output name=HAS_SECRET::${HAS_SECRET}
env:
SECRET: "${{ secrets.APPLE_DEV_ID_APP }}"
- name: Import Apple app signing certificate
if: steps.check_secrets.outputs.HAS_SECRET
if: steps.check_secrets.outputs.HAS_APPLE_SECRET
uses: Apple-Actions/import-codesign-certs@v1
with:
p12-file-base64: ${{ secrets.APPLE_DEV_ID_APP }}
@ -121,9 +136,9 @@ jobs:
with:
node-version: '16.x'
- name: Build MacOS DMG in Catalina
- name: Build MacOS DMG
env:
NOTARIZE: ${{ steps.check_secrets.outputs.HAS_SECRET }}
NOTARIZE: ${{ steps.check_secrets.outputs.HAS_APPLE_SECRET }}
APPLE_NOTARIZE_USERNAME: "${{ secrets.APPLE_NOTARIZE_USERNAME }}"
APPLE_NOTARIZE_PASSWORD: "${{ secrets.APPLE_NOTARIZE_PASSWORD }}"
run: |
@ -136,5 +151,68 @@ jobs:
- name: Upload MacOS artifacts
uses: actions/upload-artifact@v2
with:
name: Chia-Installer-on-MacOS-10.15-dmg
name: Chia-Installer-MacOS-intel-dmg
path: ${{ github.workspace }}/build_scripts/final_installer/
- name: Create Checksums
run: |
ls
shasum -a 256 ${{ github.workspace }}/build_scripts/final_installer/Chia-${{ steps.version_number.outputs.CHIA_INSTALLER_VERSION }}.dmg > ${{ github.workspace }}/build_scripts/final_installer/Chia-${{ steps.version_number.outputs.CHIA_INSTALLER_VERSION }}.dmg.sha256
- name: Upload to s3
if: steps.check_secrets.outputs.HAS_AWS_SECRET
env:
AWS_ACCESS_KEY_ID: ${{ secrets.INSTALLER_UPLOAD_KEY }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.INSTALLER_UPLOAD_SECRET }}
AWS_REGION: us-west-2
CHIA_INSTALLER_VERSION: ${{ steps.version_number.outputs.CHIA_INSTALLER_VERSION }}
run: |
GIT_SHORT_HASH=$(echo "${GITHUB_SHA}" | cut -c1-8)
CHIA_DEV_BUILD=${CHIA_INSTALLER_VERSION}-$GIT_SHORT_HASH
echo "CHIA_DEV_BUILD=$CHIA_DEV_BUILD" >>$GITHUB_ENV
aws s3 cp ${{ github.workspace }}/build_scripts/final_installer/Chia-${{ steps.version_number.outputs.CHIA_INSTALLER_VERSION }}.dmg s3://download.chia.net/builds/Chia-${CHIA_DEV_BUILD}.dmg
- name: Install py3createtorrent
if: startsWith(github.ref, 'refs/tags/')
run: |
pip install py3createtorrent
- name: Create torrent
if: startsWith(github.ref, 'refs/tags/')
run: |
py3createtorrent -f -t udp://tracker.opentrackr.org:1337/announce ${{ github.workspace }}/build_scripts/final_installer/Chia-${{ steps.version_number.outputs.CHIA_INSTALLER_VERSION }}.dmg -o ${{ github.workspace }}/build_scripts/final_installer/Chia-${{ steps.version_number.outputs.CHIA_INSTALLER_VERSION }}.dmg.torrent --webseed https://download.chia.net/install/Chia-${{ steps.version_number.outputs.CHIA_INSTALLER_VERSION }}.dmg
ls ${{ github.workspace }}/build_scripts/final_installer/
- name: Upload Beta Installer
if: steps.check_secrets.outputs.HAS_AWS_SECRET && github.ref == 'refs/heads/main'
env:
CHIA_INSTALLER_VERSION: ${{ steps.version_number.outputs.CHIA_INSTALLER_VERSION }}
AWS_ACCESS_KEY_ID: ${{ secrets.INSTALLER_UPLOAD_KEY }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.INSTALLER_UPLOAD_SECRET }}
AWS_REGION: us-west-2
run: |
aws s3 cp ${{ github.workspace }}/build_scripts/final_installer/Chia-${{ steps.version_number.outputs.CHIA_INSTALLER_VERSION }}.dmg s3://download.chia.net/beta/Chia_latest_beta.dmg
aws s3 cp ${{ github.workspace }}/build_scripts/final_installer/Chia-${{ steps.version_number.outputs.CHIA_INSTALLER_VERSION }}.dmg.sha256 s3://download.chia.net/beta/Chia_latest_beta.dmg.sha256
- name: Upload Release Files
if: steps.check_secrets.outputs.HAS_AWS_SECRET && startsWith(github.ref, 'refs/tags/')
env:
AWS_ACCESS_KEY_ID: ${{ secrets.INSTALLER_UPLOAD_KEY }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.INSTALLER_UPLOAD_SECRET }}
AWS_REGION: us-west-2
run: |
aws s3 cp ${{ github.workspace }}/build_scripts/final_installer/Chia-${{ steps.version_number.outputs.CHIA_INSTALLER_VERSION }}.dmg s3://download.chia.net/install/
aws s3 cp ${{ github.workspace }}/build_scripts/final_installer/Chia-${{ steps.version_number.outputs.CHIA_INSTALLER_VERSION }}.dmg.sha256 s3://download.chia.net/install/
aws s3 cp ${{ github.workspace }}/build_scripts/final_installer/Chia-${{ steps.version_number.outputs.CHIA_INSTALLER_VERSION }}.dmg.torrent s3://download.chia.net/torrents/
- name: Get tag name
if: startsWith(github.ref, 'refs/tags/')
id: tag-name
run: |
echo "::set-output name=TAG_NAME::$(echo ${{ github.ref }} | cut -d'/' -f 3)"
echo "::set-output name=REPO_NAME::$(echo ${{ github.repository }} | cut -d'/' -f 2)"
- name: Mark installer complete
if: startsWith(github.ref, 'refs/tags/')
run: |
curl -s -XPOST -H "Authorization: Bearer ${{ secrets.GLUE_ACCESS_TOKEN }}" --data '{"chia_ref": "${{ steps.tag-name.outputs.TAG_NAME }}"}' ${{ secrets.GLUE_API_URL }}/api/v1/${{ steps.tag-name.outputs.REPO_NAME }}/${{ steps.tag-name.outputs.TAG_NAME }}/success/build-macos


@ -144,8 +144,12 @@ jobs:
AWS_ACCESS_KEY_ID: ${{ secrets.INSTALLER_UPLOAD_KEY }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.INSTALLER_UPLOAD_SECRET }}
AWS_REGION: us-west-2
CHIA_INSTALLER_VERSION: ${{ steps.version_number.outputs.CHIA_INSTALLER_VERSION }}
run: |
arch -arm64 aws s3 cp ${{ github.workspace }}/build_scripts/final_installer/Chia-${{ steps.version_number.outputs.CHIA_INSTALLER_VERSION }}-arm64.dmg s3://download-chia-net/builds/
GIT_SHORT_HASH=$(echo "${GITHUB_SHA}" | cut -c1-8)
CHIA_DEV_BUILD=${CHIA_INSTALLER_VERSION}-$GIT_SHORT_HASH
echo "CHIA_DEV_BUILD=$CHIA_DEV_BUILD" >>$GITHUB_ENV
arch -arm64 aws s3 cp ${{ github.workspace }}/build_scripts/final_installer/Chia-${CHIA_INSTALLER_VERSION}-arm64.dmg s3://download.chia.net/dev/Chia-${CHIA_DEV_BUILD}-arm64.dmg
- name: Install py3createtorrent
if: startsWith(github.ref, 'refs/tags/')
@ -155,7 +159,7 @@ jobs:
- name: Create torrent
if: startsWith(github.ref, 'refs/tags/')
run: |
arch -arm64 py3createtorrent -f -t udp://tracker.opentrackr.org:1337/announce ${{ github.workspace }}/build_scripts/final_installer/Chia-${{ steps.version_number.outputs.CHIA_INSTALLER_VERSION }}-arm64.dmg -o ${{ github.workspace }}/build_scripts/final_installer/Chia-${{ steps.version_number.outputs.CHIA_INSTALLER_VERSION }}-arm64.dmg.torrent --webseed https://download-chia-net.s3.us-west-2.amazonaws.com/install/Chia-${{ steps.version_number.outputs.CHIA_INSTALLER_VERSION }}-arm64.dmg
arch -arm64 py3createtorrent -f -t udp://tracker.opentrackr.org:1337/announce ${{ github.workspace }}/build_scripts/final_installer/Chia-${{ steps.version_number.outputs.CHIA_INSTALLER_VERSION }}-arm64.dmg -o ${{ github.workspace }}/build_scripts/final_installer/Chia-${{ steps.version_number.outputs.CHIA_INSTALLER_VERSION }}-arm64.dmg.torrent --webseed https://download.chia.net/install/Chia-${{ steps.version_number.outputs.CHIA_INSTALLER_VERSION }}-arm64.dmg
ls ${{ github.workspace }}/build_scripts/final_installer/
- name: Upload Beta Installer
@ -166,8 +170,8 @@ jobs:
AWS_SECRET_ACCESS_KEY: ${{ secrets.INSTALLER_UPLOAD_SECRET }}
AWS_REGION: us-west-2
run: |
arch -arm64 aws s3 cp ${{ github.workspace }}/build_scripts/final_installer/Chia-${{ steps.version_number.outputs.CHIA_INSTALLER_VERSION }}-arm64.dmg s3://download-chia-net/beta/Chia-arm64_latest_beta.dmg
arch -arm64 aws s3 cp ${{ github.workspace }}/build_scripts/final_installer/Chia-${{ steps.version_number.outputs.CHIA_INSTALLER_VERSION }}-arm64.dmg.sha256 s3://download-chia-net/beta/Chia-arm64_latest_beta.dmg.sha256
arch -arm64 aws s3 cp ${{ github.workspace }}/build_scripts/final_installer/Chia-${{ steps.version_number.outputs.CHIA_INSTALLER_VERSION }}-arm64.dmg s3://download.chia.net/beta/Chia-arm64_latest_beta.dmg
arch -arm64 aws s3 cp ${{ github.workspace }}/build_scripts/final_installer/Chia-${{ steps.version_number.outputs.CHIA_INSTALLER_VERSION }}-arm64.dmg.sha256 s3://download.chia.net/beta/Chia-arm64_latest_beta.dmg.sha256
- name: Upload Release Files
if: steps.check_secrets.outputs.HAS_AWS_SECRET && startsWith(github.ref, 'refs/tags/')
@ -176,9 +180,9 @@ jobs:
AWS_SECRET_ACCESS_KEY: ${{ secrets.INSTALLER_UPLOAD_SECRET }}
AWS_REGION: us-west-2
run: |
arch -arm64 aws s3 cp ${{ github.workspace }}/build_scripts/final_installer/Chia-${{ steps.version_number.outputs.CHIA_INSTALLER_VERSION }}-arm64.dmg s3://download-chia-net/install/
arch -arm64 aws s3 cp ${{ github.workspace }}/build_scripts/final_installer/Chia-${{ steps.version_number.outputs.CHIA_INSTALLER_VERSION }}-arm64.dmg.sha256 s3://download-chia-net/install/
arch -arm64 aws s3 cp ${{ github.workspace }}/build_scripts/final_installer/Chia-${{ steps.version_number.outputs.CHIA_INSTALLER_VERSION }}-arm64.dmg.torrent s3://download-chia-net/torrents/
arch -arm64 aws s3 cp ${{ github.workspace }}/build_scripts/final_installer/Chia-${{ steps.version_number.outputs.CHIA_INSTALLER_VERSION }}-arm64.dmg s3://download.chia.net/install/
arch -arm64 aws s3 cp ${{ github.workspace }}/build_scripts/final_installer/Chia-${{ steps.version_number.outputs.CHIA_INSTALLER_VERSION }}-arm64.dmg.sha256 s3://download.chia.net/install/
arch -arm64 aws s3 cp ${{ github.workspace }}/build_scripts/final_installer/Chia-${{ steps.version_number.outputs.CHIA_INSTALLER_VERSION }}-arm64.dmg.torrent s3://download.chia.net/torrents/
- name: Get tag name
if: startsWith(github.ref, 'refs/tags/')


@ -0,0 +1,100 @@
#
# THIS FILE IS GENERATED. SEE https://github.com/Chia-Network/chia-blockchain/tree/main/tests#readme
#
name: MacOS core-full_node-stores Tests
on:
push:
branches:
- main
tags:
- '**'
pull_request:
branches:
- '**'
concurrency:
# SHA is added to the end if on `main` to let all main workflows run
group: ${{ github.ref }}-${{ github.workflow }}-${{ github.event_name }}-${{ github.ref == 'refs/heads/main' && github.sha || '' }}
cancel-in-progress: true
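# Illustrative evaluation of the group key above: on a pull request it becomes
# e.g. "refs/pull/123/merge-MacOS core-full_node-stores Tests-pull_request-"
# (so a newer push cancels the older run), while on main the trailing SHA gives
# every push its own group and nothing is cancelled.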
jobs:
build:
name: MacOS core-full_node-stores Tests
runs-on: ${{ matrix.os }}
timeout-minutes: 40
strategy:
fail-fast: false
max-parallel: 4
matrix:
python-version: [3.8, 3.9]
os: [macOS-latest]
steps:
- name: Checkout Code
uses: actions/checkout@v2
with:
fetch-depth: 0
- name: Setup Python environment
uses: actions/setup-python@v2
with:
python-version: ${{ matrix.python-version }}
- name: Create keychain for CI use
run: |
security create-keychain -p foo chiachain
security default-keychain -s chiachain
security unlock-keychain -p foo chiachain
security set-keychain-settings -t 7200 -u chiachain
- name: Get pip cache dir
id: pip-cache
run: |
echo "::set-output name=dir::$(pip cache dir)"
- name: Cache pip
uses: actions/cache@v2.1.6
with:
# Note that new runners may break this https://github.com/actions/cache/issues/292
path: ${{ steps.pip-cache.outputs.dir }}
key: ${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }}
restore-keys: |
${{ runner.os }}-pip-
- name: Checkout test blocks and plots
uses: actions/checkout@v2
with:
repository: 'Chia-Network/test-cache'
path: '.chia'
ref: '0.28.0'
fetch-depth: 1
- name: Link home directory
run: |
cd $HOME
ln -s $GITHUB_WORKSPACE/.chia
echo "$HOME/.chia"
ls -al $HOME/.chia
- name: Run install script
env:
INSTALL_PYTHON_VERSION: ${{ matrix.python-version }}
BUILD_VDF_CLIENT: "N"
run: |
brew install boost
sh install.sh -d
- name: Install timelord
run: |
. ./activate
sh install-timelord.sh
./vdf_bench square_asm 400000
- name: Test core-full_node-stores code with pytest
run: |
. ./activate
./venv/bin/py.test tests/core/full_node/stores/test_*.py -s -v --durations 0
#
# THIS FILE IS GENERATED. SEE https://github.com/Chia-Network/chia-blockchain/tree/main/tests#readme
#


@ -22,7 +22,7 @@ jobs:
build:
name: MacOS core-full_node Tests
runs-on: ${{ matrix.os }}
timeout-minutes: 80
timeout-minutes: 40
strategy:
fail-fast: false
max-parallel: 4


@ -22,7 +22,7 @@ jobs:
build:
name: MacOS pools Tests
runs-on: ${{ matrix.os }}
timeout-minutes: 45
timeout-minutes: 60
strategy:
fail-fast: false
max-parallel: 4


@ -99,8 +99,9 @@ jobs:
- name: Test blockchain code with pytest
run: |
. ./activate
./venv/bin/py.test tests/blockchain/test_*.py -s -v --durations 0
./venv/bin/py.test tests/blockchain/test_*.py -s -v --durations 0 -p no:monitor
# Omitted resource usage check
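# (-p no:monitor disables the pytest-monitor plugin for this job, so no .pymon database is written; the same flag is added across the generated workflows below.)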
#
# THIS FILE IS GENERATED. SEE https://github.com/Chia-Network/chia-blockchain/tree/main/tests#readme


@ -82,8 +82,9 @@ jobs:
- name: Test clvm code with pytest
run: |
. ./activate
./venv/bin/py.test tests/clvm/test_*.py -s -v --durations 0 -n auto
./venv/bin/py.test tests/clvm/test_*.py -s -v --durations 0 -n auto -p no:monitor
# Omitted resource usage check
#
# THIS FILE IS GENERATED. SEE https://github.com/Chia-Network/chia-blockchain/tree/main/tests#readme


@ -99,8 +99,9 @@ jobs:
- name: Test core-cmds code with pytest
run: |
. ./activate
./venv/bin/py.test tests/core/cmds/test_*.py -s -v --durations 0
./venv/bin/py.test tests/core/cmds/test_*.py -s -v --durations 0 -p no:monitor
# Omitted resource usage check
#
# THIS FILE IS GENERATED. SEE https://github.com/Chia-Network/chia-blockchain/tree/main/tests#readme


@ -99,8 +99,9 @@ jobs:
- name: Test core-consensus code with pytest
run: |
. ./activate
./venv/bin/py.test tests/core/consensus/test_*.py -s -v --durations 0
./venv/bin/py.test tests/core/consensus/test_*.py -s -v --durations 0 -p no:monitor
# Omitted resource usage check
#
# THIS FILE IS GENERATED. SEE https://github.com/Chia-Network/chia-blockchain/tree/main/tests#readme


@ -99,8 +99,9 @@ jobs:
- name: Test core-custom_types code with pytest
run: |
. ./activate
./venv/bin/py.test tests/core/custom_types/test_*.py -s -v --durations 0
./venv/bin/py.test tests/core/custom_types/test_*.py -s -v --durations 0 -p no:monitor
# Omitted resource usage check
#
# THIS FILE IS GENERATED. SEE https://github.com/Chia-Network/chia-blockchain/tree/main/tests#readme


@ -99,8 +99,9 @@ jobs:
- name: Test core-daemon code with pytest
run: |
. ./activate
./venv/bin/py.test tests/core/daemon/test_*.py -s -v --durations 0
./venv/bin/py.test tests/core/daemon/test_*.py -s -v --durations 0 -p no:monitor
# Omitted resource usage check
#
# THIS FILE IS GENERATED. SEE https://github.com/Chia-Network/chia-blockchain/tree/main/tests#readme


@ -99,8 +99,9 @@ jobs:
- name: Test core-data_layer code with pytest
run: |
. ./activate
./venv/bin/py.test tests/core/data_layer/test_*.py -s -v --durations 0
./venv/bin/py.test tests/core/data_layer/test_*.py -s -v --durations 0 -p no:monitor
# Omitted resource usage check
#
# THIS FILE IS GENERATED. SEE https://github.com/Chia-Network/chia-blockchain/tree/main/tests#readme


@ -99,8 +99,9 @@ jobs:
- name: Test core-full_node-full_sync code with pytest
run: |
. ./activate
./venv/bin/py.test tests/core/full_node/full_sync/test_*.py -s -v --durations 0
./venv/bin/py.test tests/core/full_node/full_sync/test_*.py -s -v --durations 0 -p no:monitor
# Omitted resource usage check
#
# THIS FILE IS GENERATED. SEE https://github.com/Chia-Network/chia-blockchain/tree/main/tests#readme


@ -0,0 +1,111 @@
#
# THIS FILE IS GENERATED. SEE https://github.com/Chia-Network/chia-blockchain/tree/main/tests#readme
#
name: Ubuntu core-full_node-stores Test
on:
push:
branches:
- main
tags:
- '**'
pull_request:
branches:
- '**'
concurrency:
# SHA is added to the end if on `main` to let all main workflows run
group: ${{ github.ref }}-${{ github.workflow }}-${{ github.event_name }}-${{ github.ref == 'refs/heads/main' && github.sha || '' }}
cancel-in-progress: true
jobs:
build:
name: Ubuntu core-full_node-stores Test
runs-on: ${{ matrix.os }}
timeout-minutes: 40
strategy:
fail-fast: false
max-parallel: 4
matrix:
python-version: [3.7, 3.8, 3.9]
os: [ubuntu-latest]
steps:
- name: Checkout Code
uses: actions/checkout@v2
with:
fetch-depth: 0
- name: Setup Python environment
uses: actions/setup-python@v2
with:
python-version: ${{ matrix.python-version }}
- name: Cache npm
uses: actions/cache@v2.1.6
with:
path: ~/.npm
key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
restore-keys: |
${{ runner.os }}-node-
- name: Get pip cache dir
id: pip-cache
run: |
echo "::set-output name=dir::$(pip cache dir)"
- name: Cache pip
uses: actions/cache@v2.1.6
with:
path: ${{ steps.pip-cache.outputs.dir }}
key: ${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }}
restore-keys: |
${{ runner.os }}-pip-
- name: Checkout test blocks and plots
uses: actions/checkout@v2
with:
repository: 'Chia-Network/test-cache'
path: '.chia'
ref: '0.28.0'
fetch-depth: 1
- name: Link home directory
run: |
cd $HOME
ln -s $GITHUB_WORKSPACE/.chia
echo "$HOME/.chia"
ls -al $HOME/.chia
- name: Install ubuntu dependencies
run: |
sudo apt-get install software-properties-common
sudo add-apt-repository ppa:deadsnakes/ppa
sudo apt-get update
sudo apt-get install python${{ matrix.python-version }}-venv python${{ matrix.python-version }}-distutils git -y
- name: Run install script
env:
INSTALL_PYTHON_VERSION: ${{ matrix.python-version }}
run: |
sh install.sh -d
- name: Install timelord
run: |
. ./activate
sh install-timelord.sh
./vdf_bench square_asm 400000
- name: Test core-full_node-stores code with pytest
run: |
. ./activate
./venv/bin/py.test tests/core/full_node/stores/test_*.py -s -v --durations 0
- name: Check resource usage
run: |
sqlite3 -readonly -separator " " .pymon "select item,cpu_usage,total_time,mem_usage from TEST_METRICS order by mem_usage desc;" >metrics.out
./tests/check_pytest_monitor_output.py <metrics.out
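# .pymon is the SQLite database pytest-monitor writes by default; the query dumps per-test CPU and memory figures for the checker script to vet.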
#
# THIS FILE IS GENERATED. SEE https://github.com/Chia-Network/chia-blockchain/tree/main/tests#readme
#


@ -22,7 +22,7 @@ jobs:
build:
name: Ubuntu core-full_node Test
runs-on: ${{ matrix.os }}
timeout-minutes: 80
timeout-minutes: 40
strategy:
fail-fast: false
max-parallel: 4
@ -101,7 +101,6 @@ jobs:
. ./activate
./venv/bin/py.test tests/core/full_node/test_*.py -s -v --durations 0
- name: Check resource usage
run: |
sqlite3 -readonly -separator " " .pymon "select item,cpu_usage,total_time,mem_usage from TEST_METRICS order by mem_usage desc;" >metrics.out


@ -99,8 +99,9 @@ jobs:
- name: Test core-server code with pytest
run: |
. ./activate
./venv/bin/py.test tests/core/server/test_*.py -s -v --durations 0
./venv/bin/py.test tests/core/server/test_*.py -s -v --durations 0 -p no:monitor
# Omitted resource usage check
#
# THIS FILE IS GENERATED. SEE https://github.com/Chia-Network/chia-blockchain/tree/main/tests#readme


@ -99,8 +99,9 @@ jobs:
- name: Test core-ssl code with pytest
run: |
. ./activate
./venv/bin/py.test tests/core/ssl/test_*.py -s -v --durations 0
./venv/bin/py.test tests/core/ssl/test_*.py -s -v --durations 0 -p no:monitor
# Omitted resource usage check
#
# THIS FILE IS GENERATED. SEE https://github.com/Chia-Network/chia-blockchain/tree/main/tests#readme


@ -99,8 +99,9 @@ jobs:
- name: Test core-util code with pytest
run: |
. ./activate
./venv/bin/py.test tests/core/util/test_*.py -s -v --durations 0
./venv/bin/py.test tests/core/util/test_*.py -s -v --durations 0 -p no:monitor
# Omitted resource usage check
#
# THIS FILE IS GENERATED. SEE https://github.com/Chia-Network/chia-blockchain/tree/main/tests#readme


@ -99,8 +99,9 @@ jobs:
- name: Test core code with pytest
run: |
. ./activate
./venv/bin/py.test tests/core/test_*.py -s -v --durations 0
./venv/bin/py.test tests/core/test_*.py -s -v --durations 0 -p no:monitor
# Omitted resource usage check
#
# THIS FILE IS GENERATED. SEE https://github.com/Chia-Network/chia-blockchain/tree/main/tests#readme


@ -99,8 +99,9 @@ jobs:
- name: Test farmer_harvester code with pytest
run: |
. ./activate
./venv/bin/py.test tests/farmer_harvester/test_*.py -s -v --durations 0
./venv/bin/py.test tests/farmer_harvester/test_*.py -s -v --durations 0 -p no:monitor
# Omitted resource usage check
#
# THIS FILE IS GENERATED. SEE https://github.com/Chia-Network/chia-blockchain/tree/main/tests#readme


@ -99,8 +99,9 @@ jobs:
- name: Test generator code with pytest
run: |
. ./activate
./venv/bin/py.test tests/generator/test_*.py -s -v --durations 0
./venv/bin/py.test tests/generator/test_*.py -s -v --durations 0 -p no:monitor
# Omitted resource usage check
#
# THIS FILE IS GENERATED. SEE https://github.com/Chia-Network/chia-blockchain/tree/main/tests#readme


@ -95,8 +95,9 @@ jobs:
- name: Test plotting code with pytest
run: |
. ./activate
./venv/bin/py.test tests/plotting/test_*.py -s -v --durations 0
./venv/bin/py.test tests/plotting/test_*.py -s -v --durations 0 -p no:monitor
# Omitted resource usage check
#
# THIS FILE IS GENERATED. SEE https://github.com/Chia-Network/chia-blockchain/tree/main/tests#readme


@ -22,7 +22,7 @@ jobs:
build:
name: Ubuntu pools Test
runs-on: ${{ matrix.os }}
timeout-minutes: 45
timeout-minutes: 60
strategy:
fail-fast: false
max-parallel: 4
@ -99,8 +99,9 @@ jobs:
- name: Test pools code with pytest
run: |
. ./activate
./venv/bin/py.test tests/pools/test_*.py -s -v --durations 0
./venv/bin/py.test tests/pools/test_*.py -s -v --durations 0 -p no:monitor
# Omitted resource usage check
#
# THIS FILE IS GENERATED. SEE https://github.com/Chia-Network/chia-blockchain/tree/main/tests#readme


@ -99,8 +99,9 @@ jobs:
- name: Test simulation code with pytest
run: |
. ./activate
./venv/bin/py.test tests/simulation/test_*.py -s -v --durations 0
./venv/bin/py.test tests/simulation/test_*.py -s -v --durations 0 -p no:monitor
# Omitted resource usage check
#
# THIS FILE IS GENERATED. SEE https://github.com/Chia-Network/chia-blockchain/tree/main/tests#readme


@ -99,8 +99,9 @@ jobs:
- name: Test tools code with pytest
run: |
. ./activate
./venv/bin/py.test tests/tools/test_*.py -s -v --durations 0
./venv/bin/py.test tests/tools/test_*.py -s -v --durations 0 -p no:monitor
# Omitted resource usage check
#
# THIS FILE IS GENERATED. SEE https://github.com/Chia-Network/chia-blockchain/tree/main/tests#readme


@ -99,8 +99,9 @@ jobs:
- name: Test util code with pytest
run: |
. ./activate
./venv/bin/py.test tests/util/test_*.py -s -v --durations 0
./venv/bin/py.test tests/util/test_*.py -s -v --durations 0 -p no:monitor
# Omitted resource usage check
#
# THIS FILE IS GENERATED. SEE https://github.com/Chia-Network/chia-blockchain/tree/main/tests#readme


@ -99,8 +99,9 @@ jobs:
- name: Test wallet-cat_wallet code with pytest
run: |
. ./activate
./venv/bin/py.test tests/wallet/cat_wallet/test_*.py -s -v --durations 0
./venv/bin/py.test tests/wallet/cat_wallet/test_*.py -s -v --durations 0 -p no:monitor
# Omitted resource usage check
#
# THIS FILE IS GENERATED. SEE https://github.com/Chia-Network/chia-blockchain/tree/main/tests#readme


@ -99,8 +99,9 @@ jobs:
- name: Test wallet-db_wallet code with pytest
run: |
. ./activate
./venv/bin/py.test tests/wallet/db_wallet/test_*.py -s -v --durations 0
./venv/bin/py.test tests/wallet/db_wallet/test_*.py -s -v --durations 0 -p no:monitor
# Omitted resource usage check
#
# THIS FILE IS GENERATED. SEE https://github.com/Chia-Network/chia-blockchain/tree/main/tests#readme


@ -99,8 +99,9 @@ jobs:
- name: Test wallet-did_wallet code with pytest
run: |
. ./activate
./venv/bin/py.test tests/wallet/did_wallet/test_*.py -s -v --durations 0
./venv/bin/py.test tests/wallet/did_wallet/test_*.py -s -v --durations 0 -p no:monitor
# Omitted resource usage check
#
# THIS FILE IS GENERATED. SEE https://github.com/Chia-Network/chia-blockchain/tree/main/tests#readme


@ -99,8 +99,9 @@ jobs:
- name: Test wallet-rl_wallet code with pytest
run: |
. ./activate
./venv/bin/py.test tests/wallet/rl_wallet/test_*.py -s -v --durations 0
./venv/bin/py.test tests/wallet/rl_wallet/test_*.py -s -v --durations 0 -p no:monitor
# Omitted resource usage check
#
# THIS FILE IS GENERATED. SEE https://github.com/Chia-Network/chia-blockchain/tree/main/tests#readme


@ -99,8 +99,9 @@ jobs:
- name: Test wallet-rpc code with pytest
run: |
. ./activate
./venv/bin/py.test tests/wallet/rpc/test_*.py -s -v --durations 0
./venv/bin/py.test tests/wallet/rpc/test_*.py -s -v --durations 0 -p no:monitor
# Omitted resource usage check
#
# THIS FILE IS GENERATED. SEE https://github.com/Chia-Network/chia-blockchain/tree/main/tests#readme


@ -99,8 +99,9 @@ jobs:
- name: Test wallet-simple_sync code with pytest
run: |
. ./activate
./venv/bin/py.test tests/wallet/simple_sync/test_*.py -s -v --durations 0
./venv/bin/py.test tests/wallet/simple_sync/test_*.py -s -v --durations 0 -p no:monitor
# Omitted resource usage check
#
# THIS FILE IS GENERATED. SEE https://github.com/Chia-Network/chia-blockchain/tree/main/tests#readme


@ -99,8 +99,9 @@ jobs:
- name: Test wallet-sync code with pytest
run: |
. ./activate
./venv/bin/py.test tests/wallet/sync/test_*.py -s -v --durations 0
./venv/bin/py.test tests/wallet/sync/test_*.py -s -v --durations 0 -p no:monitor
# Omitted resource usage check
#
# THIS FILE IS GENERATED. SEE https://github.com/Chia-Network/chia-blockchain/tree/main/tests#readme


@ -99,8 +99,9 @@ jobs:
- name: Test wallet code with pytest
run: |
. ./activate
./venv/bin/py.test tests/wallet/test_*.py -s -v --durations 0
./venv/bin/py.test tests/wallet/test_*.py -s -v --durations 0 -p no:monitor
# Omitted resource usage check
#
# THIS FILE IS GENERATED. SEE https://github.com/Chia-Network/chia-blockchain/tree/main/tests#readme


@ -99,8 +99,9 @@ jobs:
- name: Test weight_proof code with pytest
run: |
. ./activate
./venv/bin/py.test tests/weight_proof/test_*.py -s -v --durations 0
./venv/bin/py.test tests/weight_proof/test_*.py -s -v --durations 0 -p no:monitor
# Omitted resource usage check
#
# THIS FILE IS GENERATED. SEE https://github.com/Chia-Network/chia-blockchain/tree/main/tests#readme


@ -181,9 +181,14 @@ jobs:
if: steps.check_secrets.outputs.HAS_AWS_SECRET
env:
CHIA_INSTALLER_VERSION: ${{ steps.version_number.outputs.CHIA_INSTALLER_VERSION }}
shell: bash
run: |
ls ${{ github.workspace }}\chia-blockchain-gui\release-builds\windows-installer\
aws s3 cp ${{ github.workspace }}\chia-blockchain-gui\release-builds\windows-installer\ChiaSetup-${{ steps.version_number.outputs.CHIA_INSTALLER_VERSION }}.exe s3://download-chia-net/builds/
GIT_SHORT_HASH=$(echo "${GITHUB_SHA}" | cut -c1-8)
CHIA_DEV_BUILD=${CHIA_INSTALLER_VERSION}-$GIT_SHORT_HASH
echo ::set-output name=CHIA_DEV_BUILD::${CHIA_DEV_BUILD}
echo ${CHIA_DEV_BUILD}
pwd
aws s3 cp chia-blockchain-gui/release-builds/windows-installer/ChiaSetup-${CHIA_INSTALLER_VERSION}.exe s3://download.chia.net/dev/ChiaSetup-${CHIA_DEV_BUILD}.exe
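# Note: unlike the Linux and macOS jobs, this writes CHIA_DEV_BUILD as a step output (::set-output) rather than exporting it via $GITHUB_ENV.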
- name: Create Checksums
env:
@ -200,7 +205,7 @@ jobs:
- name: Create torrent
if: startsWith(github.ref, 'refs/tags/')
run: |
py3createtorrent -f -t udp://tracker.opentrackr.org:1337/announce ${{ github.workspace }}\chia-blockchain-gui\release-builds\windows-installer\ChiaSetup-${{ steps.version_number.outputs.CHIA_INSTALLER_VERSION }}.exe -o ${{ github.workspace }}\chia-blockchain-gui\release-builds\windows-installer\ChiaSetup-${{ steps.version_number.outputs.CHIA_INSTALLER_VERSION }}.exe.torrent --webseed https://download-chia-net.s3.us-west-2.amazonaws.com/install/ChiaSetup-${{ steps.version_number.outputs.CHIA_INSTALLER_VERSION }}.exe
py3createtorrent -f -t udp://tracker.opentrackr.org:1337/announce ${{ github.workspace }}\chia-blockchain-gui\release-builds\windows-installer\ChiaSetup-${{ steps.version_number.outputs.CHIA_INSTALLER_VERSION }}.exe -o ${{ github.workspace }}\chia-blockchain-gui\release-builds\windows-installer\ChiaSetup-${{ steps.version_number.outputs.CHIA_INSTALLER_VERSION }}.exe.torrent --webseed https://download.chia.net/install/ChiaSetup-${{ steps.version_number.outputs.CHIA_INSTALLER_VERSION }}.exe
ls
- name: Upload Beta Installer
@ -208,8 +213,8 @@ jobs:
env:
CHIA_INSTALLER_VERSION: ${{ steps.version_number.outputs.CHIA_INSTALLER_VERSION }}
run: |
aws s3 cp ${{ github.workspace }}\chia-blockchain-gui\release-builds\windows-installer\ChiaSetup-${{ steps.version_number.outputs.CHIA_INSTALLER_VERSION }}.exe s3://download-chia-net/beta/ChiaSetup-latest-beta.exe
aws s3 cp ${{ github.workspace }}\chia-blockchain-gui\release-builds\windows-installer\ChiaSetup-${{ steps.version_number.outputs.CHIA_INSTALLER_VERSION }}.exe.sha256 s3://download-chia-net/beta/ChiaSetup-latest-beta.exe.sha256
aws s3 cp ${{ github.workspace }}\chia-blockchain-gui\release-builds\windows-installer\ChiaSetup-${{ steps.version_number.outputs.CHIA_INSTALLER_VERSION }}.exe s3://download.chia.net/beta/ChiaSetup-latest-beta.exe
aws s3 cp ${{ github.workspace }}\chia-blockchain-gui\release-builds\windows-installer\ChiaSetup-${{ steps.version_number.outputs.CHIA_INSTALLER_VERSION }}.exe.sha256 s3://download.chia.net/beta/ChiaSetup-latest-beta.exe.sha256
- name: Upload Release Files
if: steps.check_secrets.outputs.HAS_AWS_SECRET && startsWith(github.ref, 'refs/tags/')
@ -217,9 +222,9 @@ jobs:
AWS_ACCESS_KEY_ID: ${{ secrets.INSTALLER_UPLOAD_KEY }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.INSTALLER_UPLOAD_SECRET }}
run: |
aws s3 cp ${{ github.workspace }}\chia-blockchain-gui\release-builds\windows-installer\ChiaSetup-${{ steps.version_number.outputs.CHIA_INSTALLER_VERSION }}.exe s3://download-chia-net/install/
aws s3 cp ${{ github.workspace }}\chia-blockchain-gui\release-builds\windows-installer\ChiaSetup-${{ steps.version_number.outputs.CHIA_INSTALLER_VERSION }}.exe.sha256 s3://download-chia-net/install/
aws s3 cp ${{ github.workspace }}\chia-blockchain-gui\release-builds\windows-installer\ChiaSetup-${{ steps.version_number.outputs.CHIA_INSTALLER_VERSION }}.exe.torrent s3://download-chia-net/torrents/
aws s3 cp ${{ github.workspace }}\chia-blockchain-gui\release-builds\windows-installer\ChiaSetup-${{ steps.version_number.outputs.CHIA_INSTALLER_VERSION }}.exe s3://download.chia.net/install/
aws s3 cp ${{ github.workspace }}\chia-blockchain-gui\release-builds\windows-installer\ChiaSetup-${{ steps.version_number.outputs.CHIA_INSTALLER_VERSION }}.exe.sha256 s3://download.chia.net/install/
aws s3 cp ${{ github.workspace }}\chia-blockchain-gui\release-builds\windows-installer\ChiaSetup-${{ steps.version_number.outputs.CHIA_INSTALLER_VERSION }}.exe.torrent s3://download.chia.net/torrents/
- name: Get tag name
if: startsWith(github.ref, 'refs/tags/')


@ -62,7 +62,6 @@ jobs:
DEFAULT_BRANCH: main
LINTER_RULES_PATH: .
MARKDOWN_CONFIG_FILE: .markdown-lint.yml
PYTHON_BLACK_CONFIG_FILE: pyproject.toml
PYTHON_FLAKE8_CONFIG_FILE: .flake8
PYTHON_ISORT_CONFIG_FILE: .isort.cfg
PYTHON_PYLINT_CONFIG_FILE: pylintrc
@ -77,7 +76,6 @@ jobs:
VALIDATE_POWERSHELL: true
VALIDATE_PYTHON_PYLINT: true
VALIDATE_PYTHON_FLAKE8: true
VALIDATE_PYTHON_BLACK: true
# VALIDATE_PYTHON_ISORT: true
VALIDATE_SHELL_SHFMT: true
VALIDATE_TYPESCRIPT_ES: true


@ -64,7 +64,13 @@ jobs:
- name: arch:latest
type: arch
url: "docker://archlinux:latest"
# TODO: what CentOS version provides Python3.7-3.9?
- name: centos:7
type: centos
url: "docker://centos:7"
# commented out until we decide what to do with this, it fails consistently
# - name: centos:8
# type: centos
# url: "docker://centos:8"
- name: debian:buster
type: debian
# https://packages.debian.org/buster/python/python3 (3.7)
@ -89,6 +95,9 @@ jobs:
# type: fedora
# # (35, 3.10) https://packages.fedoraproject.org/search?query=python3&releases=Fedora+35&start=0
# url: "docker://fedora:35"
- name: rockylinux:8
type: rocky
url: "docker://rockylinux:8"
- name: ubuntu:bionic (18.04)
type: ubuntu
# https://packages.ubuntu.com/bionic/python3.7 (18.04, 3.7)
@ -117,6 +126,25 @@ jobs:
run: |
pacman --noconfirm --refresh base --sync git sudo
- name: Prepare CentOS
if: ${{ matrix.distribution.type == 'centos' }}
# Installing Git from yum brings git@1.x which doesn't work on actions/checkout.
# So install git@2.x from source
run: |
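# CentOS Linux 8 reached end of life at the end of 2021 and its original mirrors went offline, hence the switch to vault.centos.org below.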
if [ "$(rpm --eval %{centos_ver})" = "8" ]; then
sed -i 's|#baseurl=http://mirror.centos.org|baseurl=http://vault.centos.org|g' /etc/yum.repos.d/CentOS-Linux-*;
fi
yum update -y
yum install -y sudo gcc autoconf make wget curl-devel expat-devel gettext-devel openssl-devel perl-devel zlib-devel
wget https://mirrors.edge.kernel.org/pub/software/scm/git/git-2.29.0.tar.gz
tar zxf git-2.29.0.tar.gz
pushd git-2.29.0
make configure
./configure --prefix=/usr/local
make all
make install
popd
- name: Prepare Debian
if: ${{ matrix.distribution.type == 'debian' }}
env:
@ -130,6 +158,11 @@ jobs:
run: |
yum install --assumeyes git
- name: Prepare Rocky
if: ${{ matrix.distribution.type == 'rocky' }}
run: |
yum install --assumeyes git sudo
- name: Prepare Ubuntu
if: ${{ matrix.distribution.type == 'ubuntu' }}
env:


@ -0,0 +1,31 @@
name: Trigger Dev Docker Build
on:
pull_request:
concurrency:
# SHA is added to the end if on `main` to let all main workflows run
group: ${{ github.ref }}-${{ github.workflow }}-${{ github.event_name }}-${{ github.ref == 'refs/heads/main' && github.sha || '' }}
cancel-in-progress: true
jobs:
trigger:
name: Trigger building a new dev tag for the chia-docker image
runs-on: ubuntu-latest
steps:
- name: Test for secrets access
id: check_secrets
shell: bash
run: |
unset HAS_SECRET
if [ -n "$GLUE_ACCESS_TOKEN" ]; then HAS_SECRET='true' ; fi
echo ::set-output name=HAS_SECRET::${HAS_SECRET}
env:
GLUE_ACCESS_TOKEN: "${{ secrets.GLUE_ACCESS_TOKEN }}"
- name: Trigger docker dev workflow via github-glue
if: steps.check_secrets.outputs.HAS_SECRET
run: |
curl -s -XPOST -H "Authorization: Bearer ${{ secrets.GLUE_ACCESS_TOKEN }}" --data '{"sha":"${{ github.sha }}"}' ${{ secrets.GLUE_API_URL }}/api/v1/docker-build-dev/${{ github.sha }}/start
curl -s -XPOST -H "Authorization: Bearer ${{ secrets.GLUE_ACCESS_TOKEN }}" --data '{"sha":"${{ github.sha }}"}' ${{ secrets.GLUE_API_URL }}/api/v1/docker-build-dev/${{ github.sha }}/success/build-dev
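# Both calls target Chia's internal "glue" API: the first marks the docker-build-dev job started, the second marks its build-dev stage successful.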


@ -48,6 +48,10 @@ jobs:
python -m pip install --upgrade pip setuptools wheel
python -m pip install .[dev]
- name: Lint source with black
run: |
black --check --diff .
- name: Lint source with flake8
run: |
flake8 chia tests

.gitignore

@ -92,6 +92,8 @@ build/
# data layer
**/dl_server_files*
# Temporal `n` (node version manager) directory
.n/
# pytest-monitor
# https://pytest-monitor.readthedocs.io/en/latest/operating.html?highlight=.pymon#storage


@ -1,20 +1,262 @@
[settings]
profile=
; vertical hanging indent mode also used in black configuration
multi_line_output = 3
; necessary because black expect the trailing comma
include_trailing_comma = true
; black compatibility
force_grid_wrap = 0
; black compatibility
use_parentheses = True
; black compatibility
ensure_newline_before_comments = True
; we chose 120 as line length
line_length = 120
profile=black
skip_gitignore=true
# venv/bin/isort --check . |& sed -n "s;ERROR: ${PWD}/\(.*\) Imports are.*; \1;p" | sort | uniq
extend_skip=
benchmarks/block_store.py
benchmarks/coin_store.py
benchmarks/utils.py
chia/clvm/spend_sim.py
chia/cmds/chia.py
chia/cmds/data_funcs.py
chia/cmds/data.py
chia/cmds/db.py
chia/cmds/db_upgrade_func.py
chia/cmds/farm_funcs.py
chia/cmds/farm.py
chia/cmds/init_funcs.py
chia/cmds/init.py
chia/cmds/keys_funcs.py
chia/cmds/keys.py
chia/cmds/netspace.py
chia/cmds/passphrase_funcs.py
chia/cmds/passphrase.py
chia/cmds/plotnft_funcs.py
chia/cmds/plotnft.py
chia/cmds/plotters.py
chia/cmds/show.py
chia/cmds/start_funcs.py
chia/cmds/start.py
chia/cmds/wallet_funcs.py
chia/cmds/wallet.py
chia/daemon/keychain_proxy.py
chia/daemon/keychain_server.py
chia/daemon/server.py
chia/data_layer/benchmark.py
chia/data_layer/data_layer_errors.py
chia/data_layer/data_layer.py
chia/data_layer/data_layer_server.py
chia/data_layer/data_layer_wallet.py
chia/data_layer/data_store.py
chia/data_layer/dl_wallet_store.py
chia/data_layer/download_data.py
chia/farmer/farmer_api.py
chia/farmer/farmer.py
chia/full_node/block_height_map.py
chia/full_node/block_store.py
chia/full_node/bundle_tools.py
chia/full_node/coin_store.py
chia/full_node/full_node_api.py
chia/full_node/full_node.py
chia/full_node/generator.py
chia/full_node/hint_store.py
chia/full_node/lock_queue.py
chia/full_node/mempool_check_conditions.py
chia/full_node/mempool_manager.py
chia/full_node/weight_proof.py
chia/harvester/harvester_api.py
chia/harvester/harvester.py
chia/introducer/introducer.py
chia/plotters/bladebit.py
chia/plotters/chiapos.py
chia/plotters/install_plotter.py
chia/plotters/madmax.py
chia/plotters/plotters.py
chia/plotting/check_plots.py
chia/plotting/create_plots.py
chia/plotting/manager.py
chia/plotting/util.py
chia/pools/pool_puzzles.py
chia/pools/pool_wallet_info.py
chia/pools/pool_wallet.py
chia/protocols/harvester_protocol.py
chia/protocols/pool_protocol.py
chia/protocols/protocol_state_machine.py
chia/rpc/data_layer_rpc_api.py
chia/rpc/farmer_rpc_client.py
chia/rpc/full_node_rpc_client.py
chia/rpc/rpc_client.py
chia/rpc/wallet_rpc_api.py
chia/rpc/wallet_rpc_client.py
chia/seeder/crawler.py
chia/seeder/crawl_store.py
chia/seeder/dns_server.py
chia/server/address_manager_sqlite_store.py
chia/server/address_manager_store.py
chia/server/introducer_peers.py
chia/server/node_discovery.py
chia/server/peer_store_resolver.py
chia/server/reconnect_task.py
chia/server/start_data_layer.py
chia/server/start_service.py
chia/server/start_wallet.py
chia/simulator/simulator_constants.py
chia/simulator/start_simulator.py
chia/ssl/create_ssl.py
chia/timelord/timelord_launcher.py
chia/types/blockchain_format/program.py
chia/types/blockchain_format/proof_of_space.py
chia/types/blockchain_format/vdf.py
chia/types/coin_solution.py
chia/types/coin_spend.py
chia/types/full_block.py
chia/types/generator_types.py
chia/types/name_puzzle_condition.py
chia/types/spend_bundle.py
chia/util/bech32m.py
chia/util/byte_types.py
chia/util/chain_utils.py
chia/util/check_fork_next_block.py
chia/util/chia_logging.py
chia/util/condition_tools.py
chia/util/dump_keyring.py
chia/util/file_keyring.py
chia/util/files.py
chia/util/generator_tools.py
chia/util/keychain.py
chia/util/keyring_wrapper.py
chia/util/log_exceptions.py
chia/util/network.py
chia/util/profiler.py
chia/util/service_groups.py
chia/util/ssl_check.py
chia/util/streamable.py
chia/util/ws_message.py
chia/wallet/cat_wallet/cat_info.py
chia/wallet/cat_wallet/cat_utils.py
chia/wallet/cat_wallet/cat_wallet.py
chia/wallet/db_wallet/db_wallet_puzzles.py
chia/wallet/derive_keys.py
chia/wallet/did_wallet/did_info.py
chia/wallet/did_wallet/did_wallet_puzzles.py
chia/wallet/did_wallet/did_wallet.py
chia/wallet/dlo_wallet/dlo_wallet.py
chia/wallet/lineage_proof.py
chia/wallet/payment.py
chia/wallet/puzzles/genesis_checkers.py
chia/wallet/puzzles/load_clvm.py
chia/wallet/puzzles/prefarm/make_prefarm_ph.py
chia/wallet/puzzles/prefarm/spend_prefarm.py
chia/wallet/puzzles/puzzle_utils.py
chia/wallet/puzzles/singleton_top_layer.py
chia/wallet/puzzles/tails.py
chia/wallet/rl_wallet/rl_wallet.py
chia/wallet/sign_coin_spends.py
chia/wallet/trade_manager.py
chia/wallet/trade_record.py
chia/wallet/trading/offer.py
chia/wallet/trading/trade_store.py
chia/wallet/transaction_record.py
chia/wallet/util/compute_hints.py
chia/wallet/util/compute_memos.py
chia/wallet/util/debug_spend_bundle.py
chia/wallet/util/merkle_tree.py
chia/wallet/util/puzzle_compression.py
chia/wallet/util/wallet_sync_utils.py
chia/wallet/wallet_blockchain.py
chia/wallet/wallet_coin_store.py
chia/wallet/wallet_interested_store.py
chia/wallet/wallet_node_api.py
chia/wallet/wallet_node.py
chia/wallet/wallet_pool_store.py
chia/wallet/wallet.py
chia/wallet/wallet_state_manager.py
chia/wallet/wallet_weight_proof_handler.py
installhelper.py
tests/blockchain/test_blockchain.py
tests/blockchain/test_blockchain_transactions.py
tests/block_tools.py
tests/build-init-files.py
tests/build-workflows.py
tests/clvm/benchmark_costs.py
tests/clvm/coin_store.py
tests/clvm/test_chialisp_deserialization.py
tests/clvm/test_program.py
tests/clvm/test_puzzle_compression.py
tests/clvm/test_serialized_program.py
tests/clvm/test_singletons.py
tests/clvm/test_spend_sim.py
tests/conftest.py
tests/core/cmds/test_keys.py
tests/core/custom_types/test_coin.py
tests/core/custom_types/test_spend_bundle.py
tests/core/daemon/test_daemon.py
tests/core/data_layer/conftest.py
tests/core/data_layer/test_data_cli.py
tests/core/data_layer/test_data_rpc.py
tests/core/data_layer/test_data_store.py
tests/core/data_layer/util.py
tests/core/full_node/full_sync/test_full_sync.py
tests/core/full_node/ram_db.py
tests/core/full_node/stores/test_block_store.py
tests/core/full_node/stores/test_coin_store.py
tests/core/full_node/stores/test_full_node_store.py
tests/core/full_node/stores/test_hint_store.py
tests/core/full_node/test_block_height_map.py
tests/core/full_node/test_conditions.py
tests/core/full_node/test_full_node.py
tests/core/full_node/test_mempool_performance.py
tests/core/full_node/test_mempool.py
tests/core/full_node/test_performance.py
tests/core/server/test_dos.py
tests/core/server/test_rate_limits.py
tests/core/ssl/test_ssl.py
tests/core/test_daemon_rpc.py
tests/core/test_db_conversion.py
tests/core/test_filter.py
tests/core/test_full_node_rpc.py
tests/core/util/test_cached_bls.py
tests/core/util/test_config.py
tests/core/util/test_file_keyring_synchronization.py
tests/core/util/test_files.py
tests/core/util/test_keychain.py
tests/core/util/test_keyring_wrapper.py
tests/core/util/test_streamable.py
tests/generator/test_compression.py
tests/generator/test_generator_types.py
tests/generator/test_list_to_batches.py
tests/generator/test_rom.py
tests/generator/test_scan.py
tests/plotting/test_plot_manager.py
tests/plotting/util.py
tests/pools/test_pool_cmdline.py
tests/pools/test_pool_config.py
tests/pools/test_pool_puzzles_lifecycle.py
tests/pools/test_pool_rpc.py
tests/pools/test_wallet_pool_store.py
tests/setup_nodes.py
tests/simulation/test_simulation.py
tests/util/benchmark_cost.py
tests/util/blockchain.py
tests/util/build_network_protocol_files.py
tests/util/db_connection.py
tests/util/keyring.py
tests/util/key_tool.py
tests/util/misc.py
tests/util/network_protocol_data.py
tests/util/network.py
tests/util/test_lock_queue.py
tests/util/test_network_protocol_files.py
tests/util/test_struct_stream.py
tests/wallet/cat_wallet/test_cat_lifecycle.py
tests/wallet/cat_wallet/test_cat_wallet.py
tests/wallet/cat_wallet/test_offer_lifecycle.py
tests/wallet/db_wallet/test_db_clvm.py
tests/wallet/db_wallet/test_dl_wallet.py
tests/wallet/did_wallet/test_did.py
tests/wallet/did_wallet/test_did_rpc.py
tests/wallet/rpc/test_dl_wallet_rpc.py
tests/wallet/rpc/test_wallet_rpc.py
tests/wallet/simple_sync/test_simple_sync_protocol.py
tests/wallet/test_singleton_lifecycle_fast.py
tests/wallet/test_singleton_lifecycle.py
tests/wallet/test_singleton.py
tests/wallet/test_wallet_blockchain.py
tests/wallet/test_wallet_interested_store.py
tests/wallet/test_wallet_key_val_store.py
tests/wallet/test_wallet.py
tests/wallet_tools.py
tests/weight_proof/test_weight_proof.py
tools/analyze-chain.py
tools/run_block.py


@ -14,6 +14,27 @@ repos:
language: python
pass_filenames: false
additional_dependencies: [click~=7.1]
# The following, commented hook is the usual way to add isort. However, it doesn't work in some environments.
# See https://github.com/PyCQA/isort/issues/1874#issuecomment-1002212936
# -----------------------------------------------------
# - repo: https://github.com/pycqa/isort
# rev: 5.9.3
# hooks:
# - id: isort
# -----------------------------------------------------
# The hook below is the workaround for the issue above.
- repo: local
hooks:
- id: isort
name: isort
entry: isort
require_serial: true
language: python
language_version: python3
types_or: [cython, pyi, python]
args: ['--filter-files']
minimum_pre_commit_version: '2.9.2'
additional_dependencies: [isort==5.10.1]
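# (To exercise just this workaround hook locally, one can run: pre-commit run isort --all-files)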
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.0.1
hooks:
@ -25,7 +46,7 @@ repos:
- id: check-ast
- id: debug-statements
- repo: https://github.com/psf/black
rev: 21.8b0
rev: 21.12b0
hooks:
- id: black
- repo: https://gitlab.com/pycqa/flake8
@ -37,7 +58,7 @@ repos:
hooks:
- id: mypy
# TODO: see note in setup.py about types-click pinning
additional_dependencies: [pytest, types-aiofiles, types-click==0.1.12, types-setuptools, types-PyYAML]
additional_dependencies: [pytest, pytest-asyncio, types-aiofiles, types-click==0.1.12, types-setuptools, types-PyYAML]
# This intentionally counters the settings in mypy.ini to allow a loose local
# check and a strict CI check. This difference may or may not be retained long
# term.

View File

@ -6,6 +6,79 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project does not yet adhere to [Semantic Versioning](https://semver.org/spec/v2.0.0.html)
for setuptools_scm/PEP 440 reasons.
## 1.3.0 Beta Chia blockchain 2022-2-10
We at Chia have been working hard to bring all of our new features together into one easy-to-use release. Today, we're proud to announce the beta release of our 1.3 client. Because this release is still in beta, we recommend that you only install it on non-mission-critical systems. If you are running a large farm, you should wait for the full 1.3 release before upgrading. When will the full version of 1.3 be released? Soon.
### Added:
- CAT wallet support - add wallets for your favorite CATs
- Offers - make, take, and share your offers
- Integrated light wallet sync - to get you synced up faster while your full node syncs
- Wallet mode - Access just the wallet features to make and receive transactions
- Farmer mode - All your farming tools and a full node, with all the benefits of the upgraded wallet features
- New v2 DB - improved compression for smaller footprint
- Key derivation tool via CLI - lets you derive wallet addresses, child keys, and also search your keys for arbitrary wallet addresses/keys
- Light wallet data migration - CAT wallets you set up and your offer history will be carried over
- The farmer will report version info in the User-Agent field for the pool protocol (Thanks @FazendaPool)
- Added new RPC, get_version, to the daemon to return the version of Chia (Thanks @dkackman)
- Added new config.yaml setting, reserved_cores, to specify how many cores Chia will not use when launching process pools. The default is 0, which allows Chia to use all cores for process pools. This can result in faster syncing and better performance overall, especially on lower-end CPUs like the Raspberry Pi 4 (see the sketch after this list).
- Added new RPC, get_logged_in_fingerprint, to the wallet to return the currently logged in fingerprint.
- Added new CLI option, chia keys derive, to allow deriving any number of keys in various ways. This is particularly useful to do an exhaustive search for a given address using chia keys derive search.
- Div soft fork block height set to 2,300,000
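
A minimal sketch of how a reserved_cores-style setting typically feeds a process-pool size. The config key is the real one named above; the helper function and numbers are illustrative, not Chia's actual implementation:

```python
import multiprocessing as mp
from concurrent.futures import ProcessPoolExecutor

def pool_size(reserved_cores: int) -> int:
    # reserved_cores=0 (the default) leaves every core available to the pool
    return max(1, mp.cpu_count() - reserved_cores)

# e.g. on a 4-core Raspberry Pi 4, reserved_cores=0 yields a 4-worker pool
executor = ProcessPoolExecutor(max_workers=pool_size(0))
```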
### Changed:
- Light wallet client sync updated to only require 3 peers instead of 5
- Only CATs from the default CAT list will be automatically added, all other unknown CATs will need to be manually added
- New sorting pattern for offer history - Open/pending offers are sorted on top, ordered by creation date > confirmation block height > trade id, followed by Confirmed and Cancelled offers sorted in the same order
- When plotting multiple plots with the GUI, new items are taken from the top of the list instead of the bottom
- CA certificate store update
- VDF, chiapos, and blspy workflows updated to support python 3.10 wheels
- We now store peers and peer information in a serialized format instead of sqlite. The new files are called peers.dat and wallet_peers.dat. New settings peers_file_path and wallet_peers_file_path added to config.yaml.
- CLI option chia show will display the currently selected network (mainnet or testnet)
- CLI option chia plots check will display the Pool Contract Address for Portable (PlotNFT) plots
- Thanks to @cross for adding the ability to resolve IPv6 from hostnames in config.yaml. Added new config option prefer_ipv6 to toggle whether to resolve to IPv6 or IPv4. Default is false (IPv4)
- The default timeout when syncing the node was increased from 10 seconds to 30 seconds to avoid timing out when syncing from slower peers.
- TLS 1.2 is now the minimum required for all communication including peer-to-peer. The TLS 1.2 allowed cipher list is set to: "ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-SHA384:ECDHE-RSA-AES256-SHA384:ECDHE-ECDSA-AES128-SHA256:ECDHE-RSA-AES128-SHA256"
- In a future release the minimum TLS version will be set to TLS 1.3. A warning will be emitted in the log if the version of openssl in use does not support TLS 1.3. If supported, all local connections will be restricted to TLS 1.3. (See the ssl sketch after this list.)
- The new testnet is testnet10
- Switched from npm install to npm ci in the GUI install scripts
- Improved sync performance of the full node by doing BLS validation in separate processes.
- Default log rotation size was changed from 20MiB to 50MiB - added config.yaml setting log_maxbytesrotation to configure this.
- Thanks to @cross for an optimization to chiapos to use rename instead of copy if the tmp2 and final files are on the same filesystem.
- Updated to use chiapos 1.0.9
- Updated to use blspy 1.0.8
- Implemented a limit on the number of PlotNFTs a user can create, with the limit set to 20. This is to prevent users from unintentionally creating multiple PlotNFTs. The limit can be overridden for users with specific use cases that require more than 20 PlotNFTs
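
A hedged illustration of the TLS floor described in this list, using Python's standard ssl module. This is not Chia's server code, and the cipher string below is a truncated subset of the list quoted above:

```python
import ssl

ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
ctx.minimum_version = ssl.TLSVersion.TLSv1_2  # reject anything older than TLS 1.2
ctx.set_ciphers(
    "ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:"
    "ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305"
)
# TLS 1.3 cipher suites are configured separately by OpenSSL, so a future
# TLS 1.3-only floor would only need minimum_version = ssl.TLSVersion.TLSv1_3.
if not ssl.HAS_TLSv1_3:
    print("warning: this openssl build does not support TLS 1.3")
```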
### Fixed:
- Offer history limit has been fixed to show all offers now instead of limiting to just 49 offers
- Fixed issues with using madmax CLI options -w, -G, -2, -t and -d (Issue 9163) (thanks @randomisresistance and @lasers8oclockday1)
- Fixed issues with CLI option passphrase-file (Issue 9032) (thanks @moonlitbugs)
- Fixed issues with displaying IPv6 address in CLI with chia show -c
- Thanks to @chuwt for a fix to looping logic during node syncing
- Fixed the chia-blockchain RPM to set the permission of chrome-sandbox properly
- Fixed issues where the wallet code would not generate enough addresses when looking for coins, which can result in missed coins due to the address not being checked. Deprecated the config setting initial_num_public_keys_new_wallet. The config setting initial_num_public_keys is now used in all cases.
- Thanks to @risner for fixes related to using colorlog
- Fixed issues in reading the pool_list from config if set to null
- Fixed display info in CLI chia show -c when No Info should be displayed
- Thanks to @madMAx43v3r for fixes in chiapos related to a possible race condition when multiple threads call Verifier::ValidateProof
- Thanks to @PastaPastaPasta for some compiler warning fixes in bls-signatures
- Thanks to @random-zebra for fixing a bug in the bls-signature copy assignment operator
- Thanks to @lourkeur for fixes in blspy related to pybind11 2.8+
- Thanks to @nioos-ledger for a fix to the Python implementation of bls-signatures
### Known Issues:
- When you are adding plots and you choose the option to “create a Plot NFT”, you will get an error message “Initial_target_state” and the plots will not get created
- Workaround: Create the Plot NFT first in the “Pool” tab, and then add your plots and choose the created plot NFT in the drop down.
- If you are installing on a machine for the first time, when the GUI loads and you don't have any pre-existing wallet keys, the GUI will flicker and not load anything.
- Workaround: close and relaunch the GUI
- When you close the Chia app, regardless of whether you are in farmer mode or wallet mode, the content of the exit dialog isn't correct
- If you start with wallet mode and then switch to farmer mode and back to wallet mode, the full node will continue to sync in the background. To get the full node to stop syncing after switching to wallet mode, you will need to close and relaunch the Chia app.
- Wallets with a large number of transactions or a large number of coins will take longer to sync (more than a few minutes), but should still take less time than a full node sync. Syncing could fail in some cases.
## 1.2.11 Chia blockchain 2021-11-4
Farmers rejoice: today's release integrates two plotters in broad use in the Chia community: Bladebit, created by @harold-b, and Madmax, created by @madMAx43v3r. Both of these plotters bring significant improvements in plotting time. More plotting info [here](https://github.com/Chia-Network/chia-blockchain/wiki/Alternative--Plotters).

View File

@ -58,6 +58,7 @@ py.test tests -v --durations 0
The [black library](https://black.readthedocs.io/en/stable/) is used as an automatic style formatter to make things easier.
The [flake8 library](https://readthedocs.org/projects/flake8/) helps ensure consistent style.
The [Mypy library](https://mypy.readthedocs.io/en/stable/) is very useful for ensuring objects are of the correct type, so try to always add types for return values and local variables.
The [isort library](https://isort.readthedocs.io) is used to sort, group and validate imports in all Python files.
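
A quick, hedged illustration of what isort enforces, via its public Python API (the output shown is isort 5.x default behavior, which also explains the import reshuffles in several diffs below):

```python
import isort

messy = "import sys\nimport aiosqlite\nfrom pathlib import Path\nimport asyncio\n"
print(isort.code(messy))
# stdlib imports are grouped and alphabetized ahead of third-party ones:
#   import asyncio
#   import sys
#   from pathlib import Path
#
#   import aiosqlite
```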
If you want verbose logging for tests, edit the `tests/pytest.ini` file.

View File

@ -1,153 +0,0 @@
# Python package
# Create and test a Python package on multiple Python versions.
# Add steps that analyze code, save the dist with the build record, publish to a PyPI-compatible index, and more:
# https://docs.microsoft.com/azure/devops/pipelines/languages/python
trigger:
branches:
include:
- "*"
tags:
include:
- "*"
pool:
vmImage: "macOS-10.15"
strategy:
matrix:
Mojave DMG:
python.version: "3.9"
variables:
- group: Apple-Notarize-Variables
- group: AWS-Upload-Credentials
- group: GlueVariables
- group: GitHub
steps:
- checkout: self # self represents the repo where the initial Azure Pipelines YAML file was found
submodules: recursive
fetchDepth: 0
displayName: "Checkout code"
- script: |
git config --global url."https://github.com/".insteadOf ssh://git@github.com/
displayName: "Use https for git instead of ssh"
- task: UsePythonVersion@0
inputs:
versionSpec: "$(python.version)"
displayName: "Use Python $(python.version)"
# Install Apple certificate
# Install an Apple certificate required to build on a macOS agent machine
- task: InstallAppleCertificate@2
inputs:
certSecureFile: 'chia-apple-dev-id-app.p12'
certPwd: $(CHIA_APPLE_DEV_ID_APP_PASS)
keychain: temp
deleteCert: true
- script: |
python3 -m venv ../venv
. ../venv/bin/activate
pip install setuptools_scm
touch $(System.DefaultWorkingDirectory)/build_scripts/version.txt
python ./build_scripts/installer-version.py > $(System.DefaultWorkingDirectory)/build_scripts/version.txt
cat $(System.DefaultWorkingDirectory)/build_scripts/version.txt
deactivate
displayName: Create installer version number
- script: |
set -o errexit -o pipefail
MADMAX_VERSION=$(curl -u "$(GithubUsername):$(GithubToken)" --fail --silent "https://api.github.com/repos/Chia-Network/chia-plotter-madmax/releases/latest" | jq -r '.tag_name')
mkdir "$(System.DefaultWorkingDirectory)/madmax"
wget -O "$(System.DefaultWorkingDirectory)/madmax/chia_plot" https://github.com/Chia-Network/chia-plotter-madmax/releases/download/${MADMAX_VERSION}/chia_plot-${MADMAX_VERSION}-macos-intel
wget -O "$(System.DefaultWorkingDirectory)/madmax/chia_plot_k34" https://github.com/Chia-Network/chia-plotter-madmax/releases/download/${MADMAX_VERSION}/chia_plot_k34-${MADMAX_VERSION}-macos-intel
chmod +x "$(System.DefaultWorkingDirectory)/madmax/chia_plot"
chmod +x "$(System.DefaultWorkingDirectory)/madmax/chia_plot_k34"
displayName: "Get latest madmax release"
- script: |
sh install.sh
displayName: "Install dependencies"
- task: NodeTool@0
inputs:
versionSpec: '16.x'
displayName: "Setup Node 16.x"
- bash: |
. ./activate
APPLE_NOTARIZE_USERNAME="$(APPLE_NOTARIZE_USERNAME)"
export APPLE_NOTARIZE_USERNAME
APPLE_NOTARIZE_PASSWORD="$(APPLE_NOTARIZE_PASSWORD)"
export APPLE_NOTARIZE_PASSWORD
if [ "$(APPLE_NOTARIZE_PASSWORD)" ]; then NOTARIZE="true"; else NOTARIZE="false"; fi
export NOTARIZE
cd build_scripts || exit
sh build_macos.sh
displayName: "Build DMG with build_scripts/build_macos.sh"
- task: PublishPipelineArtifact@1
inputs:
targetPath: $(System.DefaultWorkingDirectory)/build_scripts/final_installer/
artifactName: MacOS-DMG
displayName: "Upload MacOS DMG"
- bash: |
ls $(System.DefaultWorkingDirectory)/build_scripts/final_installer/
cd $(System.DefaultWorkingDirectory)/build_scripts/
export CHIA_VERSION="Chia-"$(<version.txt)
shasum -a 256 $(System.DefaultWorkingDirectory)/build_scripts/final_installer/$CHIA_VERSION.dmg > $(System.DefaultWorkingDirectory)/build_scripts/final_installer/$CHIA_VERSION.dmg.sha256
ls $(System.DefaultWorkingDirectory)/build_scripts/final_installer/
displayName: "Create Checksums"
- bash: |
export AWS_ACCESS_KEY_ID=$(AccessKey)
export AWS_SECRET_ACCESS_KEY=$(SecretKey)
export AWS_DEFAULT_REGION=us-west-2
aws s3 cp $(System.DefaultWorkingDirectory)/build_scripts/final_installer/*.dmg s3://download-chia-net/builds/
displayName: "Upload to S3"
- bash: |
cd $(System.DefaultWorkingDirectory)/build_scripts/
export CHIA_VERSION="Chia-"$(<version.txt)
pip3 install py3createtorrent
py3createtorrent -f -t udp://tracker.opentrackr.org:1337/announce $(System.DefaultWorkingDirectory)/build_scripts/final_installer/$CHIA_VERSION.dmg -o $(System.DefaultWorkingDirectory)/build_scripts/final_installer/$CHIA_VERSION.dmg.torrent --webseed https://download-chia-net.s3.us-west-2.amazonaws.com/install/$CHIA_VERSION.dmg
ls
displayName: "Create torrent file"
condition: and(succeeded(), contains(variables['build.sourceBranch'], 'refs/tags/'))
- bash: |
export AWS_ACCESS_KEY_ID=$(AccessKey)
export AWS_SECRET_ACCESS_KEY=$(SecretKey)
export AWS_DEFAULT_REGION=us-west-2
cd $(System.DefaultWorkingDirectory)/build_scripts/
export CHIA_VERSION="Chia-"$(<version.txt)
aws s3 cp $(System.DefaultWorkingDirectory)/build_scripts/final_installer/$CHIA_VERSION.dmg s3://download-chia-net/beta/Chia-intel_latest_beta.dmg
aws s3 cp $(System.DefaultWorkingDirectory)/build_scripts/final_installer/$CHIA_VERSION.dmg.sha256 s3://download-chia-net/beta/Chia-intel_latest_beta.dmg.sha256
displayName: "Upload Beta Files"
condition: and(succeeded(), contains(variables['build.sourceBranch'], 'refs/heads/main'))
- bash: |
export AWS_ACCESS_KEY_ID=$(AccessKey)
export AWS_SECRET_ACCESS_KEY=$(SecretKey)
export AWS_DEFAULT_REGION=us-west-2
cd $(System.DefaultWorkingDirectory)/build_scripts/
export CHIA_VERSION="Chia-"$(<version.txt)
aws s3 cp $(System.DefaultWorkingDirectory)/build_scripts/final_installer/$CHIA_VERSION.dmg s3://download-chia-net/install/
aws s3 cp $(System.DefaultWorkingDirectory)/build_scripts/final_installer/$CHIA_VERSION.dmg.sha256 s3://download-chia-net/install/
aws s3 cp $(System.DefaultWorkingDirectory)/build_scripts/final_installer/$CHIA_VERSION.dmg.torrent s3://download-chia-net/torrents/
displayName: "Upload Release Files"
condition: and(succeeded(), contains(variables['build.sourceBranch'], 'refs/tags/'))
- bash: |
cd $(System.DefaultWorkingDirectory)/build_scripts/
export CHIA_TAG=$(<version.txt)
export REPO_NAME=$(echo $(Build.Repository.Name) | cut -d'/' -f 2)
curl -s -XPOST -H "Authorization: Bearer $(GLUE_ACCESS_TOKEN)" --data "{\"chia_ref\": \"$CHIA_TAG\"}" $(GLUE_API_URL)/api/v1/${REPO_NAME}/${CHIA_TAG}/success/build-macos
displayName: "Mark installer complete"
condition: and(succeeded(), contains(variables['build.sourceBranch'], 'refs/tags/'))

View File

@ -1,13 +1,13 @@
import click
import aiosqlite
import asyncio
import time
import random
import os
from typing import Optional, List
from pathlib import Path
import random
from dataclasses import dataclass
from pathlib import Path
from time import monotonic
from typing import List, Optional
import aiosqlite
import click
from chia.consensus.blockchain import Blockchain
from chia.consensus.default_constants import DEFAULT_CONSTANTS
@ -62,7 +62,7 @@ async def main(db_path: Path):
hint_store = await HintStore.create(db_wrapper)
coin_store = await CoinStore.create(db_wrapper)
start_time = time.time()
start_time = monotonic()
# make configurable
reserved_cores = 4
blockchain = await Blockchain.create(
@ -78,9 +78,9 @@ async def main(db_path: Path):
random_refs(),
)
start_time = time.time()
start_time = monotonic()
gen = await blockchain.get_block_generator(block)
one_call = time.time() - start_time
one_call = monotonic() - start_time
timing += one_call
assert gen is not None
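
The time.time() -> time.monotonic() swaps in this and the following benchmark files all follow one pattern. A minimal sketch of why: monotonic() is immune to wall-clock adjustments (NTP steps, DST), so it cannot produce negative or skewed intervals; sleep() stands in for the benchmarked work:

```python
from time import monotonic, sleep

start = monotonic()   # monotonic clock: only ever moves forward
sleep(0.1)            # placeholder for the work being timed
elapsed = monotonic() - start
print(f"{elapsed:0.4f}s")  # same formatting the benchmarks use
```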

View File

@ -1,6 +1,6 @@
import asyncio
import random
from time import time
from time import monotonic
from pathlib import Path
from chia.full_node.block_store import BlockStore
import os
@ -8,9 +8,17 @@ import sys
from chia.util.db_wrapper import DBWrapper
from chia.util.ints import uint128, uint64, uint32, uint8
from chia.types.blockchain_format.classgroup import ClassgroupElement
from utils import rewards, rand_hash, setup_db, rand_g1, rand_g2, rand_bytes
from chia.types.blockchain_format.vdf import VDFInfo, VDFProof
from utils import (
rewards,
rand_hash,
setup_db,
rand_g1,
rand_g2,
rand_bytes,
rand_vdf,
rand_vdf_proof,
rand_class_group_element,
)
from chia.types.full_block import FullBlock
from chia.consensus.block_record import BlockRecord
from chia.types.blockchain_format.proof_of_space import ProofOfSpace
@ -28,22 +36,6 @@ NUM_ITERS = 20000
random.seed(123456789)
def rand_class_group_element() -> ClassgroupElement:
return ClassgroupElement(rand_bytes(100))
def rand_vdf() -> VDFInfo:
return VDFInfo(rand_hash(), uint64(random.randint(100000, 1000000000)), rand_class_group_element())
def rand_vdf_proof() -> VDFProof:
return VDFProof(
uint8(1), # witness_type
rand_hash(), # witness
bool(random.randint(0, 1)), # normalized_to_identity
)
with open("clvm_generator.bin", "rb") as f:
clvm_generator = f.read()
@ -224,14 +216,14 @@ async def run_add_block_benchmark(version: int):
sub_epoch_summary_included,
)
start = time()
start = monotonic()
await block_store.add_full_block(header_hash, full_block, record)
await block_store.set_in_chain([(header_hash,)])
header_hashes.append(header_hash)
await block_store.set_peak(header_hash)
await db_wrapper.db.commit()
stop = time()
stop = monotonic()
total_time += stop - start
# 19 seconds per block
@ -268,12 +260,12 @@ async def run_add_block_benchmark(version: int):
print("profiling get_full_block")
random.shuffle(header_hashes)
start = time()
start = monotonic()
for h in header_hashes:
block = await block_store.get_full_block(h)
assert block.header_hash == h
stop = time()
stop = monotonic()
total_time += stop - start
print(f"{total_time:0.4f}s, get_full_block")
@ -283,12 +275,12 @@ async def run_add_block_benchmark(version: int):
if verbose:
print("profiling get_full_block_bytes")
start = time()
start = monotonic()
for h in header_hashes:
block = await block_store.get_full_block_bytes(h)
assert len(block) > 0
stop = time()
stop = monotonic()
total_time += stop - start
print(f"{total_time:0.4f}s, get_full_block_bytes")
@ -298,13 +290,13 @@ async def run_add_block_benchmark(version: int):
if verbose:
print("profiling get_full_blocks_at")
start = time()
start = monotonic()
for h in range(1, block_height):
blocks = await block_store.get_full_blocks_at([h])
assert len(blocks) == 1
assert blocks[0].height == h
stop = time()
stop = monotonic()
total_time += stop - start
print(f"{total_time:0.4f}s, get_full_blocks_at")
@ -314,13 +306,13 @@ async def run_add_block_benchmark(version: int):
if verbose:
print("profiling get_block_records_by_hash")
start = time()
start = monotonic()
for h in header_hashes:
blocks = await block_store.get_block_records_by_hash([h])
assert len(blocks) == 1
assert blocks[0].header_hash == h
stop = time()
stop = monotonic()
total_time += stop - start
print(f"{total_time:0.4f}s, get_block_records_by_hash")
@ -330,13 +322,13 @@ async def run_add_block_benchmark(version: int):
if verbose:
print("profiling get_blocks_by_hash")
start = time()
start = monotonic()
for h in header_hashes:
blocks = await block_store.get_blocks_by_hash([h])
assert len(blocks) == 1
assert blocks[0].header_hash == h
stop = time()
stop = monotonic()
total_time += stop - start
print(f"{total_time:0.4f}s, get_blocks_by_hash")
@ -346,12 +338,12 @@ async def run_add_block_benchmark(version: int):
if verbose:
print("profiling get_block_record")
start = time()
start = monotonic()
for h in header_hashes:
blocks = await block_store.get_block_record(h)
assert blocks.header_hash == h
stop = time()
stop = monotonic()
total_time += stop - start
print(f"{total_time:0.4f}s, get_block_record")
@ -361,13 +353,13 @@ async def run_add_block_benchmark(version: int):
if verbose:
print("profiling get_block_records_in_range")
start = time()
start = monotonic()
for i in range(100):
h = random.randint(1, block_height - 100)
blocks = await block_store.get_block_records_in_range(h, h + 99)
assert len(blocks) == 100
stop = time()
stop = monotonic()
total_time += stop - start
print(f"{total_time:0.4f}s, get_block_records_in_range")
@ -377,11 +369,11 @@ async def run_add_block_benchmark(version: int):
if verbose:
print("profiling get_block_records_close_to_peak")
start = time()
start = monotonic()
blocks, peak = await block_store.get_block_records_close_to_peak(99)
assert len(blocks) == 100
stop = time()
stop = monotonic()
total_time += stop - start
print(f"{total_time:0.4f}s, get_block_records_close_to_peak")
@ -391,12 +383,12 @@ async def run_add_block_benchmark(version: int):
if verbose:
print("profiling is_fully_compactified")
start = time()
start = monotonic()
for h in header_hashes:
compactified = await block_store.is_fully_compactified(h)
assert compactified is False
stop = time()
stop = monotonic()
total_time += stop - start
print(f"{total_time:0.4f}s, get_block_record")
@ -406,11 +398,11 @@ async def run_add_block_benchmark(version: int):
if verbose:
print("profiling get_random_not_compactified")
start = time()
start = monotonic()
for i in range(1, 5000):
blocks = await block_store.get_random_not_compactified(100)
assert len(blocks) == 100
stop = time()
stop = monotonic()
total_time += stop - start
print(f"{total_time:0.4f}s, get_random_not_compactified")

View File

@ -1,6 +1,6 @@
import asyncio
import random
from time import time
from time import monotonic
from pathlib import Path
from chia.full_node.coin_store import CoinStore
from typing import List, Tuple
@ -109,7 +109,7 @@ async def run_new_block_benchmark(version: int):
all_unspent = all_unspent[100:]
total_remove += 100
start = time()
start = monotonic()
await coin_store.new_block(
height,
timestamp,
@ -118,7 +118,7 @@ async def run_new_block_benchmark(version: int):
removals,
)
await db_wrapper.db.commit()
stop = time()
stop = monotonic()
# 19 seconds per block
timestamp += 19
@ -160,7 +160,7 @@ async def run_new_block_benchmark(version: int):
all_unspent = all_unspent[700:]
total_remove += 700
start = time()
start = monotonic()
await coin_store.new_block(
height,
timestamp,
@ -170,7 +170,7 @@ async def run_new_block_benchmark(version: int):
)
await db_wrapper.db.commit()
stop = time()
stop = monotonic()
# 19 seconds per block
timestamp += 19
@ -210,7 +210,7 @@ async def run_new_block_benchmark(version: int):
all_unspent = all_unspent[2000:]
total_remove += 2000
start = time()
start = monotonic()
await coin_store.new_block(
height,
timestamp,
@ -219,7 +219,7 @@ async def run_new_block_benchmark(version: int):
removals,
)
await db_wrapper.db.commit()
stop = time()
stop = monotonic()
# 19 seconds per block
timestamp += 19
@ -242,9 +242,9 @@ async def run_new_block_benchmark(version: int):
found_coins = 0
for i in range(NUM_ITERS):
lookup = random.sample(all_coins, 200)
start = time()
start = monotonic()
records = await coin_store.get_coin_records_by_names(True, lookup)
total_time += time() - start
total_time += monotonic() - start
assert len(records) == 200
found_coins += len(records)
if verbose:
@ -265,9 +265,9 @@ async def run_new_block_benchmark(version: int):
found_coins = 0
for i in range(NUM_ITERS):
lookup = random.sample(all_coins, 200)
start = time()
start = monotonic()
records = await coin_store.get_coin_records_by_names(False, lookup)
total_time += time() - start
total_time += monotonic() - start
assert len(records) <= 200
found_coins += len(records)
if verbose:
@ -287,9 +287,9 @@ async def run_new_block_benchmark(version: int):
total_time = 0
found_coins = 0
for i in range(1, block_height):
start = time()
start = monotonic()
records = await coin_store.get_coins_removed_at_height(i)
total_time += time() - start
total_time += monotonic() - start
found_coins += len(records)
if verbose:
print(".", end="")

View File

@ -1,8 +1,10 @@
from chia.consensus.default_constants import DEFAULT_CONSTANTS
from chia.util.ints import uint64, uint32
from chia.util.ints import uint64, uint32, uint8
from chia.consensus.coinbase import create_farmer_coin, create_pool_coin
from chia.types.blockchain_format.classgroup import ClassgroupElement
from chia.types.blockchain_format.coin import Coin
from chia.types.blockchain_format.sized_bytes import bytes32
from chia.types.blockchain_format.vdf import VDFInfo, VDFProof
from chia.util.db_wrapper import DBWrapper
from typing import Tuple
from pathlib import Path
@ -46,6 +48,24 @@ def rand_g2() -> G2Element:
return AugSchemeMPL.sign(sk, b"foobar")
def rand_class_group_element() -> ClassgroupElement:
# TODO: address hint errors and remove ignores
# error: Argument 1 to "ClassgroupElement" has incompatible type "bytes"; expected "bytes100" [arg-type]
return ClassgroupElement(rand_bytes(100)) # type: ignore[arg-type]
def rand_vdf() -> VDFInfo:
return VDFInfo(rand_hash(), uint64(random.randint(100000, 1000000000)), rand_class_group_element())
def rand_vdf_proof() -> VDFProof:
return VDFProof(
uint8(1), # witness_type
rand_hash(), # witness
bool(random.randint(0, 1)), # normalized_to_identity
)
async def setup_db(name: str, db_version: int) -> DBWrapper:
db_filename = Path(name)
try:

View File

@ -34,7 +34,7 @@ rm -rf dist
mkdir dist
echo "Create executables with pyinstaller"
pip install pyinstaller==4.5
pip install pyinstaller==4.9
SPEC_FILE=$(python -c 'import chia; print(chia.PYINSTALLER_SPEC_PATH)')
pyinstaller --log-level=INFO "$SPEC_FILE"
LAST_EXIT_CODE=$?

View File

@ -36,7 +36,7 @@ rm -rf dist
mkdir dist
echo "Create executables with pyinstaller"
pip install pyinstaller==4.5
pip install pyinstaller==4.9
SPEC_FILE=$(python -c 'import chia; print(chia.PYINSTALLER_SPEC_PATH)')
pyinstaller --log-level=INFO "$SPEC_FILE"
LAST_EXIT_CODE=$?

View File

@ -25,7 +25,7 @@ sudo rm -rf dist
mkdir dist
echo "Create executables with pyinstaller"
pip install pyinstaller==4.5
pip install pyinstaller==4.9
SPEC_FILE=$(python -c 'import chia; print(chia.PYINSTALLER_SPEC_PATH)')
pyinstaller --log-level=INFO "$SPEC_FILE"
LAST_EXIT_CODE=$?

View File

@ -25,7 +25,7 @@ sudo rm -rf dist
mkdir dist
echo "Install pyinstaller and build bootloaders for M1"
pip install pyinstaller==4.5
pip install pyinstaller==4.9
echo "Create executables with pyinstaller"
SPEC_FILE=$(python -c 'import chia; print(chia.PYINSTALLER_SPEC_PATH)')

View File

@ -30,7 +30,7 @@ python -m venv venv
python -m pip install --upgrade pip
pip install wheel pep517
pip install pywin32
pip install pyinstaller==4.5
pip install pyinstaller==4.9
pip install setuptools_scm
Write-Output " ---"

View File

@ -0,0 +1,13 @@
{
"name": "npm_global",
"version": "1.0.0",
"lockfileVersion": 1,
"requires": true,
"dependencies": {
"n": {
"version": "8.0.2",
"resolved": "https://registry.npmjs.org/n/-/n-8.0.2.tgz",
"integrity": "sha512-IvKMeWenkEntHnktypexqIi1BCTQc0Po1+zBanui+flF4dwHtsV+B2WNkx6KAMCqlTHyIisSddj1Y7EbnKRgXQ=="
}
}
}

View File

@ -0,0 +1,15 @@
{
"name": "npm_global",
"version": "1.0.0",
"description": "",
"main": "index.js",
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1"
},
"keywords": [],
"author": "",
"license": "ISC",
"dependencies": {
"n": "^8.0.2"
}
}

@ -1 +1 @@
Subproject commit 26646cbba6c4e04d390e1830e430252e81a33f87
Subproject commit 800a0f6556b89e928b1cf027c996b5ed010a7799

View File

@ -1,5 +1,5 @@
from pathlib import Path
from typing import Dict
from typing import Dict, Optional
import click
@ -19,6 +19,10 @@ def configure(
set_peer_count: str,
testnet: str,
peer_connect_timeout: str,
crawler_db_path: str,
crawler_minimum_version_count: Optional[int],
seeder_domain_name: str,
seeder_nameserver: str,
enable_data_server: str = "",
):
config: Dict = load_config(DEFAULT_ROOT_PATH, "config.yaml")
@ -103,6 +107,7 @@ def configure(
testnet_port = "58444"
testnet_introducer = "introducer-testnet10.chia.net"
testnet_dns_introducer = "dns-introducer-testnet10.chia.net"
bootstrap_peers = ["testnet10-node.chia.net"]
testnet = "testnet10"
config["full_node"]["port"] = int(testnet_port)
config["full_node"]["introducer_peer"]["port"] = int(testnet_port)
@ -113,6 +118,7 @@ def configure(
config["introducer"]["port"] = int(testnet_port)
config["full_node"]["introducer_peer"]["host"] = testnet_introducer
config["full_node"]["dns_servers"] = [testnet_dns_introducer]
config["wallet"]["dns_servers"] = [testnet_dns_introducer]
config["selected_network"] = testnet
config["harvester"]["selected_network"] = testnet
config["pool"]["selected_network"] = testnet
@ -123,6 +129,13 @@ def configure(
config["introducer"]["selected_network"] = testnet
config["wallet"]["selected_network"] = testnet
config["data_layer"]["selected_network"] = testnet
if "seeder" in config:
config["seeder"]["port"] = int(testnet_port)
config["seeder"]["other_peers_port"] = int(testnet_port)
config["seeder"]["selected_network"] = testnet
config["seeder"]["bootstrap_peers"] = bootstrap_peers
print("Default full node port, introducer and network setting updated")
change_made = True
@ -131,6 +144,7 @@ def configure(
mainnet_port = "8444"
mainnet_introducer = "introducer.chia.net"
mainnet_dns_introducer = "dns-introducer.chia.net"
bootstrap_peers = ["node.chia.net"]
net = "mainnet"
config["full_node"]["port"] = int(mainnet_port)
config["full_node"]["introducer_peer"]["port"] = int(mainnet_port)
@ -150,6 +164,13 @@ def configure(
config["ui"]["selected_network"] = net
config["introducer"]["selected_network"] = net
config["wallet"]["selected_network"] = net
if "seeder" in config:
config["seeder"]["port"] = int(mainnet_port)
config["seeder"]["other_peers_port"] = int(mainnet_port)
config["seeder"]["selected_network"] = net
config["seeder"]["bootstrap_peers"] = bootstrap_peers
print("Default full node port, introducer and network setting updated")
change_made = True
else:
@ -159,10 +180,25 @@ def configure(
config["full_node"]["peer_connect_timeout"] = int(peer_connect_timeout)
change_made = True
if crawler_db_path is not None and "seeder" in config:
config["seeder"]["crawler_db_path"] = crawler_db_path
change_made = True
if crawler_minimum_version_count is not None and "seeder" in config:
config["seeder"]["minimum_version_count"] = crawler_minimum_version_count
change_made = True
if seeder_domain_name is not None and "seeder" in config:
config["seeder"]["domain_name"] = seeder_domain_name
change_made = True
if seeder_nameserver is not None and "seeder" in config:
config["seeder"]["nameserver"] = seeder_nameserver
change_made = True
if change_made:
print("Restart any running chia services for changes to take effect")
save_config(root_path, "config.yaml", config)
return 0
@click.command("configure", short_help="Modify configuration")
@ -205,6 +241,26 @@ def configure(
)
@click.option("--set-peer-count", help="Update the target peer count (default 80)", type=str)
@click.option("--set-peer-connect-timeout", help="Update the peer connect timeout (default 30)", type=str)
@click.option(
"--crawler-db-path",
help="configures the path to the crawler database",
type=str,
)
@click.option(
"--crawler-minimum-version-count",
help="configures how many of a particular version must be seen to be reported in logs",
type=int,
)
@click.option(
"--seeder-domain-name",
help="configures the seeder domain_name setting. Ex: `seeder.example.com.`",
type=str,
)
@click.option(
"--seeder-nameserver",
help="configures the seeder nameserver setting. Ex: `example.com.`",
type=str,
)
@click.option(
"--enable-data-server",
"--data-server",
@ -224,6 +280,10 @@ def configure_cmd(
set_peer_count,
testnet,
set_peer_connect_timeout,
crawler_db_path,
crawler_minimum_version_count,
seeder_domain_name,
seeder_nameserver,
enable_data_server,
):
configure(
@ -238,5 +298,9 @@ def configure_cmd(
set_peer_count,
testnet,
set_peer_connect_timeout,
crawler_db_path,
crawler_minimum_version_count,
seeder_domain_name,
seeder_nameserver,
enable_data_server,
)
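
A hedged sketch of how the new seeder/crawler flags parse, using click's real test runner against a toy command that mirrors the options above (it is not the actual chia entry point):

```python
import click
from click.testing import CliRunner

@click.command()
@click.option("--crawler-db-path", type=str)
@click.option("--crawler-minimum-version-count", type=int)
@click.option("--seeder-domain-name", type=str)
@click.option("--seeder-nameserver", type=str)
def configure(crawler_db_path, crawler_minimum_version_count, seeder_domain_name, seeder_nameserver):
    # the real command only touches config["seeder"] for options that were passed
    if seeder_domain_name is not None:
        click.echo(f"seeder domain_name -> {seeder_domain_name}")
    if crawler_minimum_version_count is not None:
        click.echo(f"minimum_version_count -> {crawler_minimum_version_count}")

result = CliRunner().invoke(
    configure, ["--seeder-domain-name", "seeder.example.com.", "--crawler-minimum-version-count", "100"]
)
print(result.output)
```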

View File

@ -21,14 +21,17 @@ def db_cmd() -> None:
@click.pass_context
def db_upgrade_cmd(ctx: click.Context, no_update_config: bool, **kwargs) -> None:
in_db_path = kwargs.get("input")
out_db_path = kwargs.get("output")
db_upgrade_func(
Path(ctx.obj["root_path"]),
None if in_db_path is None else Path(in_db_path),
None if out_db_path is None else Path(out_db_path),
no_update_config,
)
try:
in_db_path = kwargs.get("input")
out_db_path = kwargs.get("output")
db_upgrade_func(
Path(ctx.obj["root_path"]),
None if in_db_path is None else Path(in_db_path),
None if out_db_path is None else Path(out_db_path),
no_update_config,
)
except RuntimeError as e:
print(f"FAILED: {e}")
if __name__ == "__main__":

View File

@ -3,14 +3,9 @@ from pathlib import Path
import sys
from time import time
import asyncio
import zstd
from chia.util.config import load_config, save_config
from chia.util.path import mkdir, path_from_root
from chia.full_node.block_store import BlockStore
from chia.full_node.coin_store import CoinStore
from chia.full_node.hint_store import HintStore
from chia.util.ints import uint32
from chia.types.blockchain_format.sized_bytes import bytes32
@ -45,7 +40,7 @@ def db_upgrade_func(
out_db_path = path_from_root(root_path, db_path_replaced)
mkdir(out_db_path.parent)
asyncio.run(convert_v1_to_v2(in_db_path, out_db_path))
convert_v1_to_v2(in_db_path, out_db_path)
if update_config:
print("updating config.yaml")
@ -64,40 +59,48 @@ HINT_COMMIT_RATE = 2000
COIN_COMMIT_RATE = 30000
async def convert_v1_to_v2(in_path: Path, out_path: Path) -> None:
import aiosqlite
from chia.util.db_wrapper import DBWrapper
def convert_v1_to_v2(in_path: Path, out_path: Path) -> None:
import sqlite3
import zstd
from contextlib import closing
if not in_path.exists():
print(f"input file doesn't exist. {in_path}")
raise RuntimeError(f"can't find {in_path}")
if in_path == out_path:
print(f"output file is the same as the input {in_path}")
raise RuntimeError("invalid conversion files")
if out_path.exists():
print(f"output file already exists. {out_path}")
raise RuntimeError("already exists")
print(f"opening file for reading: {in_path}")
async with aiosqlite.connect(in_path) as in_db:
with closing(sqlite3.connect(in_path)) as in_db:
try:
async with in_db.execute("SELECT * from database_version") as cursor:
row = await cursor.fetchone()
with closing(in_db.execute("SELECT * from database_version")) as cursor:
row = cursor.fetchone()
if row is not None and row[0] != 1:
print(f"blockchain database already version {row[0]}\nDone")
raise RuntimeError("already v2")
except aiosqlite.OperationalError:
except sqlite3.OperationalError:
pass
store_v1 = await BlockStore.create(DBWrapper(in_db, db_version=1))
print(f"opening file for writing: {out_path}")
async with aiosqlite.connect(out_path) as out_db:
await out_db.execute("pragma journal_mode=OFF")
await out_db.execute("pragma synchronous=OFF")
await out_db.execute("pragma cache_size=131072")
await out_db.execute("pragma locking_mode=exclusive")
with closing(sqlite3.connect(out_path)) as out_db:
out_db.execute("pragma journal_mode=OFF")
out_db.execute("pragma synchronous=OFF")
out_db.execute("pragma cache_size=131072")
out_db.execute("pragma locking_mode=exclusive")
print("initializing v2 version")
await out_db.execute("CREATE TABLE database_version(version int)")
await out_db.execute("INSERT INTO database_version VALUES(?)", (2,))
out_db.execute("CREATE TABLE database_version(version int)")
out_db.execute("INSERT INTO database_version VALUES(?)", (2,))
print("initializing v2 block store")
await out_db.execute(
out_db.execute(
"CREATE TABLE full_blocks("
"header_hash blob PRIMARY KEY,"
"prev_hash blob,"
@ -108,16 +111,22 @@ async def convert_v1_to_v2(in_path: Path, out_path: Path) -> None:
"block blob,"
"block_record blob)"
)
await out_db.execute(
out_db.execute(
"CREATE TABLE sub_epoch_segments_v3(" "ses_block_hash blob PRIMARY KEY," "challenge_segments blob)"
)
await out_db.execute("CREATE TABLE current_peak(key int PRIMARY KEY, hash blob)")
out_db.execute("CREATE TABLE current_peak(key int PRIMARY KEY, hash blob)")
peak_hash, peak_height = await store_v1.get_peak()
with closing(in_db.execute("SELECT header_hash, height from block_records WHERE is_peak = 1")) as cursor:
peak_row = cursor.fetchone()
if peak_row is None:
print("v1 database does not have a peak block, there is no blockchain to convert")
raise RuntimeError("no blockchain")
peak_hash = bytes32(bytes.fromhex(peak_row[0]))
peak_height = uint32(peak_row[1])
print(f"peak: {peak_hash.hex()} height: {peak_height}")
await out_db.execute("INSERT INTO current_peak VALUES(?, ?)", (0, peak_hash))
await out_db.commit()
out_db.execute("INSERT INTO current_peak VALUES(?, ?)", (0, peak_hash))
out_db.commit()
print("[1/5] converting full_blocks")
height = peak_height + 1
@ -129,15 +138,19 @@ async def convert_v1_to_v2(in_path: Path, out_path: Path) -> None:
block_start_time = start_time
block_values = []
async with in_db.execute(
"SELECT header_hash, prev_hash, block, sub_epoch_summary FROM block_records ORDER BY height DESC"
with closing(
in_db.execute(
"SELECT header_hash, prev_hash, block, sub_epoch_summary FROM block_records ORDER BY height DESC"
)
) as cursor:
async with in_db.execute(
"SELECT header_hash, height, is_fully_compactified, block FROM full_blocks ORDER BY height DESC"
with closing(
in_db.execute(
"SELECT header_hash, height, is_fully_compactified, block FROM full_blocks ORDER BY height DESC"
)
) as cursor_2:
await out_db.execute("begin transaction")
async for row in cursor:
out_db.execute("begin transaction")
for row in cursor:
header_hash = bytes.fromhex(row[0])
if header_hash != hh:
@ -145,7 +158,7 @@ async def convert_v1_to_v2(in_path: Path, out_path: Path) -> None:
# progress cursor_2 until we find the header hash
while True:
row_2 = await cursor_2.fetchone()
row_2 = cursor_2.fetchone()
if row_2 is None:
print(f"ERROR: could not find block {hh.hex()}")
raise RuntimeError(f"block {hh.hex()} not found")
@ -157,7 +170,7 @@ async def convert_v1_to_v2(in_path: Path, out_path: Path) -> None:
is_fully_compactified = row_2[2]
block_bytes = row_2[3]
prev_hash = bytes.fromhex(row[1])
prev_hash = bytes32.fromhex(row[1])
block_record = row[2]
ses = row[3]
@ -184,18 +197,18 @@ async def convert_v1_to_v2(in_path: Path, out_path: Path) -> None:
commit_in -= 1
if commit_in == 0:
commit_in = BLOCK_COMMIT_RATE
await out_db.executemany(
out_db.executemany(
"INSERT OR REPLACE INTO full_blocks VALUES(?, ?, ?, ?, ?, ?, ?, ?)", block_values
)
await out_db.commit()
await out_db.execute("begin transaction")
out_db.commit()
out_db.execute("begin transaction")
block_values = []
end_time = time()
rate = BLOCK_COMMIT_RATE / (end_time - start_time)
start_time = end_time
await out_db.executemany("INSERT OR REPLACE INTO full_blocks VALUES(?, ?, ?, ?, ?, ?, ?, ?)", block_values)
await out_db.commit()
out_db.executemany("INSERT OR REPLACE INTO full_blocks VALUES(?, ?, ?, ?, ?, ?, ?, ?)", block_values)
out_db.commit()
end_time = time()
print(f"\r {end_time - block_start_time:.2f} seconds ")
@ -204,10 +217,12 @@ async def convert_v1_to_v2(in_path: Path, out_path: Path) -> None:
commit_in = SES_COMMIT_RATE
ses_values = []
ses_start_time = time()
async with in_db.execute("SELECT ses_block_hash, challenge_segments FROM sub_epoch_segments_v3") as cursor:
with closing(
in_db.execute("SELECT ses_block_hash, challenge_segments FROM sub_epoch_segments_v3")
) as cursor:
count = 0
await out_db.execute("begin transaction")
async for row in cursor:
out_db.execute("begin transaction")
for row in cursor:
block_hash = bytes32.fromhex(row[0])
ses = row[1]
ses_values.append((block_hash, ses))
@ -219,13 +234,13 @@ async def convert_v1_to_v2(in_path: Path, out_path: Path) -> None:
commit_in -= 1
if commit_in == 0:
commit_in = SES_COMMIT_RATE
await out_db.executemany("INSERT INTO sub_epoch_segments_v3 VALUES (?, ?)", ses_values)
await out_db.commit()
await out_db.execute("begin transaction")
out_db.executemany("INSERT INTO sub_epoch_segments_v3 VALUES (?, ?)", ses_values)
out_db.commit()
out_db.execute("begin transaction")
ses_values = []
await out_db.executemany("INSERT INTO sub_epoch_segments_v3 VALUES (?, ?)", ses_values)
await out_db.commit()
out_db.executemany("INSERT INTO sub_epoch_segments_v3 VALUES (?, ?)", ses_values)
out_db.commit()
end_time = time()
print(f"\r {end_time - ses_start_time:.2f} seconds ")
@ -235,31 +250,32 @@ async def convert_v1_to_v2(in_path: Path, out_path: Path) -> None:
commit_in = HINT_COMMIT_RATE
hint_start_time = time()
hint_values = []
await out_db.execute("CREATE TABLE hints(coin_id blob, hint blob, UNIQUE (coin_id, hint))")
await out_db.commit()
async with in_db.execute("SELECT coin_id, hint FROM hints") as cursor:
count = 0
await out_db.execute("begin transaction")
async for row in cursor:
hint_values.append((row[0], row[1]))
commit_in -= 1
if commit_in == 0:
commit_in = HINT_COMMIT_RATE
await out_db.executemany(
"INSERT OR IGNORE INTO hints VALUES(?, ?) ON CONFLICT DO NOTHING", hint_values
)
await out_db.commit()
await out_db.execute("begin transaction")
hint_values = []
out_db.execute("CREATE TABLE hints(coin_id blob, hint blob, UNIQUE (coin_id, hint))")
out_db.commit()
try:
with closing(in_db.execute("SELECT coin_id, hint FROM hints")) as cursor:
count = 0
out_db.execute("begin transaction")
for row in cursor:
hint_values.append((row[0], row[1]))
commit_in -= 1
if commit_in == 0:
commit_in = HINT_COMMIT_RATE
out_db.executemany("INSERT OR IGNORE INTO hints VALUES(?, ?)", hint_values)
out_db.commit()
out_db.execute("begin transaction")
hint_values = []
except sqlite3.OperationalError:
print(" no hints table, skipping")
await out_db.executemany("INSERT OR IGNORE INTO hints VALUES (?, ?)", hint_values)
await out_db.commit()
out_db.executemany("INSERT OR IGNORE INTO hints VALUES (?, ?)", hint_values)
out_db.commit()
end_time = time()
print(f"\r {end_time - hint_start_time:.2f} seconds ")
print("[4/5] converting coin_store")
await out_db.execute(
out_db.execute(
"CREATE TABLE coin_record("
"coin_name blob PRIMARY KEY,"
" confirmed_index bigint,"
@ -270,21 +286,24 @@ async def convert_v1_to_v2(in_path: Path, out_path: Path) -> None:
" amount blob," # we use a blob of 8 bytes to store uint64
" timestamp bigint)"
)
await out_db.commit()
out_db.commit()
commit_in = COIN_COMMIT_RATE
rate = 1.0
start_time = time()
coin_values = []
coin_start_time = start_time
async with in_db.execute(
"SELECT coin_name, confirmed_index, spent_index, coinbase, puzzle_hash, coin_parent, amount, timestamp "
"FROM coin_record WHERE confirmed_index <= ?",
(peak_height,),
with closing(
in_db.execute(
"SELECT coin_name, confirmed_index, spent_index, coinbase, "
"puzzle_hash, coin_parent, amount, timestamp "
"FROM coin_record WHERE confirmed_index <= ?",
(peak_height,),
)
) as cursor:
count = 0
await out_db.execute("begin transaction")
async for row in cursor:
out_db.execute("begin transaction")
for row in cursor:
spent_index = row[2]
# in order to convert a consistent snapshot of the
@ -312,26 +331,39 @@ async def convert_v1_to_v2(in_path: Path, out_path: Path) -> None:
commit_in -= 1
if commit_in == 0:
commit_in = COIN_COMMIT_RATE
await out_db.executemany("INSERT INTO coin_record VALUES(?, ?, ?, ?, ?, ?, ?, ?)", coin_values)
await out_db.commit()
await out_db.execute("begin transaction")
out_db.executemany("INSERT INTO coin_record VALUES(?, ?, ?, ?, ?, ?, ?, ?)", coin_values)
out_db.commit()
out_db.execute("begin transaction")
coin_values = []
end_time = time()
rate = COIN_COMMIT_RATE / (end_time - start_time)
start_time = end_time
await out_db.executemany("INSERT INTO coin_record VALUES(?, ?, ?, ?, ?, ?, ?, ?)", coin_values)
await out_db.commit()
out_db.executemany("INSERT INTO coin_record VALUES(?, ?, ?, ?, ?, ?, ?, ?)", coin_values)
out_db.commit()
end_time = time()
print(f"\r {end_time - coin_start_time:.2f} seconds ")
print("[5/5] build indices")
index_start_time = time()
print(" block store")
await BlockStore.create(DBWrapper(out_db, db_version=2))
out_db.execute("CREATE INDEX height on full_blocks(height)")
out_db.execute(
"CREATE INDEX is_fully_compactified ON"
" full_blocks(is_fully_compactified, in_main_chain) WHERE in_main_chain=1"
)
out_db.execute("CREATE INDEX main_chain ON full_blocks(height, in_main_chain) WHERE in_main_chain=1")
out_db.commit()
print(" coin store")
await CoinStore.create(DBWrapper(out_db, db_version=2))
out_db.execute("CREATE INDEX IF NOT EXISTS coin_confirmed_index on coin_record(confirmed_index)")
out_db.execute("CREATE INDEX IF NOT EXISTS coin_spent_index on coin_record(spent_index)")
out_db.execute("CREATE INDEX IF NOT EXISTS coin_puzzle_hash on coin_record(puzzle_hash)")
out_db.execute("CREATE INDEX IF NOT EXISTS coin_parent_index on coin_record(coin_parent)")
out_db.commit()
print(" hint store")
await HintStore.create(DBWrapper(out_db, db_version=2))
out_db.execute("CREATE TABLE IF NOT EXISTS hints(coin_id blob, hint blob, UNIQUE (coin_id, hint))")
out_db.commit()
end_time = time()
print(f"\r {end_time - index_start_time:.2f} seconds ")

View File

@ -41,7 +41,7 @@ from chia.wallet.derive_keys import (
)
from chia.cmds.configure import configure
private_node_names = {"full_node", "wallet", "farmer", "harvester", "timelord", "data_layer", "daemon"}
private_node_names = {"full_node", "wallet", "farmer", "harvester", "timelord", "crawler", "data_layer", "daemon"}
public_node_names = {"full_node", "wallet", "farmer", "introducer", "timelord", "data_layer"}
@ -407,7 +407,23 @@ def chia_init(
# This is reached if CHIA_ROOT is set, or if user has run chia init twice
# before a new update.
if testnet:
configure(root_path, "", "", "", "", "", "", "", "", testnet="true", peer_connect_timeout="")
configure(
root_path,
set_farmer_peer="",
set_node_introducer="",
set_fullnode_port="",
set_harvester_port="",
set_log_level="",
enable_upnp="",
set_outbound_peer_count="",
set_peer_count="",
testnet="true",
peer_connect_timeout="",
crawler_db_path="",
crawler_minimum_version_count=None,
seeder_domain_name="",
seeder_nameserver="",
)
if fix_ssl_permissions:
fix_ssl(root_path)
if should_check_keys:
@ -417,7 +433,23 @@ def chia_init(
create_default_chia_config(root_path)
if testnet:
configure(root_path, "", "", "", "", "", "", "", "", testnet="true", peer_connect_timeout="")
configure(
root_path,
set_farmer_peer="",
set_node_introducer="",
set_fullnode_port="",
set_harvester_port="",
set_log_level="",
enable_upnp="",
set_outbound_peer_count="",
set_peer_count="",
testnet="true",
peer_connect_timeout="",
crawler_db_path="",
crawler_minimum_version_count=None,
seeder_domain_name="",
seeder_nameserver="",
)
create_all_ssl(root_path)
if fix_ssl_permissions:
fix_ssl(root_path)

View File

@ -116,13 +116,6 @@ def create_cmd(
required=True,
callback=validate_fee,
)
@click.option(
"--fee",
help="Fee Per Transaction, in Mojos. Fee is used TWICE: once to leave pool, once to join.",
type=int,
callback=validate_fee,
default=0,
)
@click.option(
"-wp",
"--wallet-rpc-port",
@ -153,13 +146,6 @@ def join_cmd(wallet_rpc_port: Optional[int], fingerprint: int, id: int, fee: int
required=True,
callback=validate_fee,
)
@click.option(
"--fee",
help="Transaction Fee, in Mojos. Fee is charged twice if already in a pool.",
type=int,
callback=validate_fee,
default=0,
)
@click.option(
"-wp",
"--wallet-rpc-port",

View File

@ -104,7 +104,7 @@ async def create(args: dict, wallet_client: WalletRpcClient, fingerprint: int) -
print(f"Do chia wallet get_transaction -f {fingerprint} -tx 0x{tx_record.name} to get status")
return None
except Exception as e:
print(f"Error creating plot NFT: {e}")
print(f"Error creating plot NFT: {e}\n Please start both farmer and wallet with: chia start -r farmer")
return
print("Aborting.")

View File

@ -1,204 +0,0 @@
import os
from pathlib import Path
from typing import Dict
import click
import chia.cmds.configure as chia_configure
from chia import __version__
from chia.cmds.chia import monkey_patch_click
from chia.cmds.init_funcs import init
from chia.seeder.util.config import patch_default_seeder_config
from chia.seeder.util.service_groups import all_groups, services_for_groups
from chia.seeder.util.service import launch_service, kill_service
from chia.util.config import load_config, save_config
from chia.util.default_root import DEFAULT_ROOT_PATH
CONTEXT_SETTINGS = dict(help_option_names=["-h", "--help"])
@click.group(
help=f"\n Manage the Chia Seeder ({__version__})\n",
epilog="Try 'chia seeder start crawler' or 'chia seeder start server'",
context_settings=CONTEXT_SETTINGS,
)
@click.option("--root-path", default=DEFAULT_ROOT_PATH, help="Config file root", type=click.Path(), show_default=True)
@click.pass_context
def cli(
ctx: click.Context,
root_path: str,
) -> None:
from pathlib import Path
ctx.ensure_object(dict)
ctx.obj["root_path"] = Path(root_path)
@cli.command("version", short_help="Show the Chia Seeder version")
def version_cmd() -> None:
print(__version__)
@click.command("init", short_help="Create or migrate the configuration")
@click.pass_context
def init_cmd(ctx: click.Context, **kwargs):
print("Calling Chia Seeder Init...")
init(None, ctx.obj["root_path"], True)
if os.environ.get("CHIA_ROOT", None) is not None:
print(f"warning, your CHIA_ROOT is set to {os.environ['CHIA_ROOT']}.")
root_path = ctx.obj["root_path"]
print(f"Chia directory {root_path}")
if root_path.is_dir() and not Path(root_path / "config" / "config.yaml").exists():
# This is reached if CHIA_ROOT is set, but there is no config
# This really shouldn't happen, but if we dont have the base chia config, we can't continue
print("Config does not exist. Can't continue!")
return -1
patch_default_seeder_config(root_path)
return 0
@click.command("start", short_help="Start service groups")
@click.argument("group", type=click.Choice(all_groups()), nargs=-1, required=True)
@click.pass_context
def start_cmd(ctx: click.Context, group: str) -> None:
services = services_for_groups(group)
for service in services:
print(f"Starting {service}")
launch_service(ctx.obj["root_path"], service)
@click.command("stop", short_help="Stop service groups")
@click.argument("group", type=click.Choice(all_groups()), nargs=-1, required=True)
@click.pass_context
def stop_cmd(ctx: click.Context, group: str) -> None:
services = services_for_groups(group)
for service in services:
print(f"Stopping {service}")
kill_service(ctx.obj["root_path"], service)
def configure(
root_path: Path,
testnet: str,
crawler_db_path: str,
minimum_version_count: int,
domain_name: str,
nameserver: str,
):
# Run the parent config, in case anything there (testnet) needs to be run, THEN load the config for local changes
chia_configure.configure(root_path, "", "", "", "", "", "", "", "", testnet, "")
config: Dict = load_config(DEFAULT_ROOT_PATH, "config.yaml")
change_made = False
if testnet is not None:
if testnet == "true" or testnet == "t":
print("Updating Chia Seeder to testnet settings")
port = 58444
network = "testnet10"
bootstrap = ["testnet-node.chia.net"]
config["seeder"]["port"] = port
config["seeder"]["other_peers_port"] = port
config["seeder"]["selected_network"] = network
config["seeder"]["bootstrap_peers"] = bootstrap
change_made = True
elif testnet == "false" or testnet == "f":
print("Updating Chia Seeder to mainnet settings")
port = 8444
network = "mainnet"
bootstrap = ["node.chia.net"]
config["seeder"]["port"] = port
config["seeder"]["other_peers_port"] = port
config["seeder"]["selected_network"] = network
config["seeder"]["bootstrap_peers"] = bootstrap
change_made = True
else:
print("Please choose True or False")
if crawler_db_path is not None:
config["seeder"]["crawler_db_path"] = crawler_db_path
change_made = True
if minimum_version_count is not None:
config["seeder"]["minimum_version_count"] = minimum_version_count
change_made = True
if domain_name is not None:
config["seeder"]["domain_name"] = domain_name
change_made = True
if nameserver is not None:
config["seeder"]["nameserver"] = nameserver
change_made = True
if change_made:
print("Restart any running Chia Seeder services for changes to take effect")
save_config(root_path, "config.yaml", config)
return 0
@click.command("configure", short_help="Modify configuration")
@click.option(
"--testnet",
"-t",
help="configures for connection to testnet",
type=click.Choice(["true", "t", "false", "f"]),
)
@click.option(
"--crawler-db-path",
help="configures for path to the crawler database",
type=str,
)
@click.option(
"--minimum-version-count",
help="configures how many of a particular version must be seen to be reported in logs",
type=int,
)
@click.option(
"--domain-name",
help="configures the domain_name setting. Ex: `seeder.example.com.`",
type=str,
)
@click.option(
"--nameserver",
help="configures the nameserver setting. Ex: `example.com.`",
type=str,
)
@click.pass_context
def configure_cmd(
ctx,
testnet,
crawler_db_path,
minimum_version_count,
domain_name,
nameserver,
):
configure(
ctx.obj["root_path"],
testnet,
crawler_db_path,
minimum_version_count,
domain_name,
nameserver,
)
cli.add_command(init_cmd)
cli.add_command(start_cmd)
cli.add_command(stop_cmd)
cli.add_command(configure_cmd)
def main() -> None:
monkey_patch_click()
cli() # pylint: disable=no-value-for-parameter
if __name__ == "__main__":
main()

View File

@ -1,8 +1,58 @@
from typing import Any, Optional, Union
from typing import Any, Optional, Union, Dict
from chia.types.blockchain_format.sized_bytes import bytes32
import click
from chia.util.network import is_trusted_inner
async def print_connections(client, time, NodeType, trusted_peers: Dict):
connections = await client.get_connections()
print("Connections:")
print("Type IP Ports NodeID Last Connect" + " MiB Up|Dwn")
for con in connections:
last_connect_tuple = time.struct_time(time.localtime(con["last_message_time"]))
last_connect = time.strftime("%b %d %T", last_connect_tuple)
mb_down = con["bytes_read"] / (1024 * 1024)
mb_up = con["bytes_written"] / (1024 * 1024)
host = con["peer_host"]
# Strip IPv6 brackets
host = host.strip("[]")
trusted: bool = is_trusted_inner(host, con["node_id"], trusted_peers, False)
# Nodetype length is 9 because INTRODUCER will be deprecated
if NodeType(con["type"]) is NodeType.FULL_NODE:
peak_height = con.get("peak_height", None)
connection_peak_hash = con.get("peak_hash", None)
if connection_peak_hash is None:
connection_peak_hash = "No Info"
else:
if connection_peak_hash.startswith(("0x", "0X")):
connection_peak_hash = connection_peak_hash[2:]
connection_peak_hash = f"{connection_peak_hash[:8]}..."
con_str = (
f"{NodeType(con['type']).name:9} {host:38} "
f"{con['peer_port']:5}/{con['peer_server_port']:<5}"
f" {con['node_id'].hex()[:8]}... "
f"{last_connect} "
f"{mb_up:7.1f}|{mb_down:<7.1f}"
f"\n "
)
if peak_height is not None:
con_str += f"-Height: {peak_height:8.0f} -Hash: {connection_peak_hash} -Trusted: {trusted}"
else:
con_str += f"-Height: No Info -Hash: {connection_peak_hash} -Trusted: {trusted}"
else:
con_str = (
f"{NodeType(con['type']).name:9} {host:38} "
f"{con['peer_port']:5}/{con['peer_server_port']:<5}"
f" {con['node_id'].hex()[:8]}... "
f"{last_connect} "
f"{mb_up:7.1f}|{mb_down:<7.1f}"
)
print(con_str)
async def show_async(
rpc_port: Optional[int],
@ -15,10 +65,8 @@ async def show_async(
block_by_header_hash: str,
) -> None:
import aiohttp
import time
import traceback
from time import localtime, struct_time
import time
from typing import List, Optional
from chia.consensus.block_record import BlockRecord
from chia.rpc.full_node_rpc_client import FullNodeRpcClient
@ -84,7 +132,7 @@ async def show_async(
while curr is not None and not curr.is_transaction_block:
curr = await client.get_block_record(curr.prev_hash)
peak_time = curr.timestamp
peak_time_struct = struct_time(localtime(peak_time))
peak_time_struct = time.struct_time(time.localtime(peak_time))
print(
" Time:",
@ -115,51 +163,8 @@ async def show_async(
if show_connections:
print("")
if show_connections:
connections = await client.get_connections()
print("Connections:")
print(
"Type IP Ports NodeID Last Connect"
+ " MiB Up|Dwn"
)
for con in connections:
last_connect_tuple = struct_time(localtime(con["last_message_time"]))
last_connect = time.strftime("%b %d %T", last_connect_tuple)
mb_down = con["bytes_read"] / (1024 * 1024)
mb_up = con["bytes_written"] / (1024 * 1024)
host = con["peer_host"]
# Strip IPv6 brackets
host = host.strip("[]")
# Nodetype length is 9 because INTRODUCER will be deprecated
if NodeType(con["type"]) is NodeType.FULL_NODE:
peak_height = con["peak_height"]
connection_peak_hash = con["peak_hash"]
if connection_peak_hash is None:
connection_peak_hash = "No Info"
else:
if connection_peak_hash.startswith(("0x", "0X")):
connection_peak_hash = connection_peak_hash[2:]
connection_peak_hash = f"{connection_peak_hash[:8]}..."
if peak_height is None:
peak_height = 0
con_str = (
f"{NodeType(con['type']).name:9} {host:38} "
f"{con['peer_port']:5}/{con['peer_server_port']:<5}"
f" {con['node_id'].hex()[:8]}... "
f"{last_connect} "
f"{mb_up:7.1f}|{mb_down:<7.1f}"
f"\n "
f"-SB Height: {peak_height:8.0f} -Hash: {connection_peak_hash}"
)
else:
con_str = (
f"{NodeType(con['type']).name:9} {host:38} "
f"{con['peer_port']:5}/{con['peer_server_port']:<5}"
f" {con['node_id'].hex()[:8]}... "
f"{last_connect} "
f"{mb_up:7.1f}|{mb_down:<7.1f}"
)
print(con_str)
trusted_peers: Dict = config["full_node"].get("trusted_peers", {})
await print_connections(client, time, NodeType, trusted_peers)
# if called together with state, leave a blank line
if state:
print("")
@ -217,8 +222,8 @@ async def show_async(
difficulty = block.weight
if block.is_transaction_block:
assert full_block.transactions_info is not None
block_time = struct_time(
localtime(
block_time = time.struct_time(
time.localtime(
full_block.foliage_transaction_block.timestamp
if full_block.foliage_transaction_block
else None

View File

@ -8,8 +8,10 @@ from typing import Any, Callable, List, Optional, Tuple, Dict
import aiohttp
from chia.cmds.show import print_connections
from chia.cmds.units import units
from chia.rpc.wallet_rpc_client import WalletRpcClient
from chia.server.outbound_message import NodeType
from chia.server.start_wallet import SERVICE_NAME
from chia.types.blockchain_format.sized_bytes import bytes32
from chia.util.bech32m import encode_puzzle_hash
@ -471,6 +473,10 @@ async def print_balances(args: dict, wallet_client: WalletRpcClient, fingerprint
)
print(f" -Spendable: {print_balance(balances['spendable_balance'], scale, address_prefix)}")
print(" ")
trusted_peers: Dict = config.get("trusted_peers", {})
await print_connections(wallet_client, time, NodeType, trusted_peers)
async def get_wallet(wallet_client: WalletRpcClient, fingerprint: int = None) -> Optional[Tuple[WalletRpcClient, int]]:
if fingerprint is not None:

View File

@ -29,6 +29,7 @@ from chia.full_node.block_store import BlockStore
from chia.full_node.coin_store import CoinStore
from chia.full_node.hint_store import HintStore
from chia.full_node.mempool_check_conditions import get_name_puzzle_conditions
from chia.types.block_protocol import BlockInfo
from chia.types.blockchain_format.coin import Coin
from chia.types.blockchain_format.program import SerializedProgram
from chia.types.blockchain_format.sized_bytes import bytes32
@ -47,7 +48,6 @@ from chia.util.errors import ConsensusError, Err
from chia.util.generator_tools import get_block_header, tx_removals_and_additions
from chia.util.ints import uint16, uint32, uint64, uint128
from chia.util.streamable import recurse_jsonify
from chia.types.block_protocol import BlockInfo
log = logging.getLogger(__name__)
@ -678,6 +678,8 @@ class Blockchain(BlockchainInterface):
return self.__height_map.get_ses(height)
def height_to_hash(self, height: uint32) -> Optional[bytes32]:
if not self.__height_map.contains_height(height):
return None
return self.__height_map.get_hash(height)
def contains_height(self, height: uint32) -> bool:
@ -881,18 +883,22 @@ class Blockchain(BlockchainInterface):
):
# We are not in a reorg, no need to look up alternate header hashes
# (we can get them from height_to_hash)
for ref_height in block.transactions_generator_ref_list:
header_hash = self.height_to_hash(ref_height)
if self.block_store.db_wrapper.db_version == 2:
# in the v2 database, we can look up blocks by height directly
# (as long as we're in the main chain)
result = await self.block_store.get_generators_at(block.transactions_generator_ref_list)
else:
for ref_height in block.transactions_generator_ref_list:
header_hash = self.height_to_hash(ref_height)
# if ref_height is invalid, this block should have failed with
# FUTURE_GENERATOR_REFS before getting here
assert header_hash is not None
# if ref_height is invalid, this block should have failed with
# FUTURE_GENERATOR_REFS before getting here
assert header_hash is not None
ref_block = await self.block_store.get_full_block(header_hash)
assert ref_block is not None
if ref_block.transactions_generator is None:
raise ValueError(Err.GENERATOR_REF_HAS_NO_GENERATOR)
result.append(ref_block.transactions_generator)
ref_gen = await self.block_store.get_generator(header_hash)
if ref_gen is None:
raise ValueError(Err.GENERATOR_REF_HAS_NO_GENERATOR)
result.append(ref_gen)
else:
# First tries to find the blocks in additional_blocks
reorg_chain: Dict[uint32, FullBlock] = {}
@ -933,15 +939,17 @@ class Blockchain(BlockchainInterface):
else:
if ref_height in additional_height_dict:
ref_block = additional_height_dict[ref_height]
assert ref_block is not None
if ref_block.transactions_generator is None:
raise ValueError(Err.GENERATOR_REF_HAS_NO_GENERATOR)
result.append(ref_block.transactions_generator)
else:
header_hash = self.height_to_hash(ref_height)
# TODO: address hint error and remove ignore
# error: Argument 1 to "get_full_block" of "Blockchain" has incompatible type
# "Optional[bytes32]"; expected "bytes32" [arg-type]
ref_block = await self.get_full_block(header_hash) # type: ignore[arg-type]
assert ref_block is not None
if ref_block.transactions_generator is None:
raise ValueError(Err.GENERATOR_REF_HAS_NO_GENERATOR)
result.append(ref_block.transactions_generator)
if header_hash is None:
raise ValueError(Err.GENERATOR_REF_HAS_NO_GENERATOR)
gen = await self.block_store.get_generator(header_hash)
if gen is None:
raise ValueError(Err.GENERATOR_REF_HAS_NO_GENERATOR)
result.append(gen)
assert len(result) == len(ref_list)
return BlockGenerator(block.transactions_generator, result, [])

View File

@ -54,7 +54,7 @@ testnet_kwargs = {
"MAX_GENERATOR_SIZE": 1000000,
"MAX_GENERATOR_REF_LIST_SIZE": 512, # Number of references allowed in the block generator ref list
"POOL_SUB_SLOT_ITERS": 37600000000, # iters limit * NUM_SPS
"SOFT_FORK_HEIGHT": 2000000,
"SOFT_FORK_HEIGHT": 2300000,
}

View File

@ -101,9 +101,8 @@ if getattr(sys, "frozen", False):
"chia_timelord": "start_timelord",
"chia_timelord_launcher": "timelord_launcher",
"chia_full_node_simulator": "start_simulator",
"chia_seeder": "chia_seeder",
"chia_seeder_crawler": "chia_seeder_crawler",
"chia_seeder_dns": "chia_seeder_dns",
"chia_seeder": "start_seeder",
"chia_crawler": "start_crawler",
}
def executable_for_service(service_name: str) -> str:
@ -116,7 +115,6 @@ if getattr(sys, "frozen", False):
path = f"{application_path}/{name_map[service_name]}"
return path
else:
application_path = os.path.dirname(__file__)
@ -1073,6 +1071,7 @@ class WebSocketServer:
error = None
success = False
testing = False
already_running = False
if "testing" in request:
testing = request["testing"]
@ -1086,9 +1085,17 @@ class WebSocketServer:
self.services.pop(service_command)
error = None
else:
error = f"Service {service_command} already running"
self.log.info(f"Service {service_command} already running")
already_running = True
elif len(self.connections.get(service_command, [])) > 0:
# If the service was started manually (not launched by the daemon), we should
# have a connection to it.
self.log.info(f"Service {service_command} already registered")
already_running = True
if error is None:
if already_running:
success = True
elif error is None:
try:
exe_command = service_command
if testing is True:
@ -1123,6 +1130,12 @@ class WebSocketServer:
else:
process = self.services.get(service_name)
is_running = process is not None and process.poll() is None
if not is_running:
# Check if we have a connection to the requested service. This might be the
# case if the service was started manually (i.e. not started by the daemon).
service_connections = self.connections.get(service_name)
if service_connections is not None:
is_running = len(service_connections) > 0
response = {
"success": True,
"service_name": service_name,

View File

@ -264,7 +264,7 @@ class DataLayerWallet:
)
await self.wallet_state_manager.dl_store.add_launcher(launcher_spend.coin, in_transaction)
await self.wallet_state_manager.add_interested_puzzle_hash(launcher_id, self.id(), in_transaction)
await self.wallet_state_manager.add_interested_puzzle_hashes([launcher_id], [self.id()], in_transaction)
await self.wallet_state_manager.coin_store.add_coin_record(
WalletCoinRecord(
new_singleton,
@ -408,7 +408,7 @@ class DataLayerWallet:
)
await self.wallet_state_manager.dl_store.add_singleton_record(singleton_record, False)
await self.wallet_state_manager.add_interested_puzzle_hash(singleton_record.launcher_id, self.id(), False)
await self.wallet_state_manager.add_interested_puzzle_hashes([singleton_record.launcher_id], [self.id()], False)
return dl_record, std_record, launcher_coin.name()
@ -754,7 +754,7 @@ class DataLayerWallet:
self.id(),
)
)
await self.wallet_state_manager.add_interested_coin_id(new_singleton.name())
await self.wallet_state_manager.add_interested_coin_ids([new_singleton.name()])
await self.potentially_handle_resubmit(singleton_record.launcher_id)
async def potentially_handle_resubmit(self, launcher_id: bytes32) -> None:

View File

@ -18,7 +18,7 @@ from chia.daemon.keychain_proxy import (
connect_to_keychain_and_validate,
wrap_local_keychain,
)
from chia.pools.pool_config import PoolWalletConfig, load_pool_config
from chia.pools.pool_config import PoolWalletConfig, load_pool_config, add_auth_key
from chia.protocols import farmer_protocol, harvester_protocol
from chia.protocols.pool_protocol import (
ErrorResponse,
@ -128,6 +128,17 @@ class Farmer:
self.started = False
self.harvester_handshake_task: Optional[asyncio.Task] = None
# From p2_singleton_puzzle_hash to pool state dict
self.pool_state: Dict[bytes32, Dict] = {}
# From p2_singleton to auth PrivateKey
self.authentication_keys: Dict[bytes32, PrivateKey] = {}
# Last time we updated pool_state based on the config file
self.last_config_access_time: uint64 = uint64(0)
self.harvester_cache: Dict[str, Dict[str, HarvesterCacheEntry]] = {}
async def ensure_keychain_proxy(self) -> KeychainProxy:
if not self.keychain_proxy:
if self.local_keychain:
@ -153,6 +164,15 @@ class Farmer:
log.warning(no_keys_error_str)
return False
config = load_config(self._root_path, "config.yaml")
if "xch_target_address" not in self.config:
self.config = config["farmer"]
if "xch_target_address" not in self.pool_config:
self.pool_config = config["pool"]
if "xch_target_address" not in self.config or "xch_target_address" not in self.pool_config:
log.debug("xch_target_address missing in the config")
return False
# This is the farmer configuration
self.farmer_target_encoded = self.config["xch_target_address"]
self.farmer_target = decode_puzzle_hash(self.farmer_target_encoded)
@ -172,19 +192,6 @@ class Farmer:
log.warning(no_keys_error_str)
return False
# The variables below are for use with an actual pool
# From p2_singleton_puzzle_hash to pool state dict
self.pool_state: Dict[bytes32, Dict] = {}
# From p2_singleton to auth PrivateKey
self.authentication_keys: Dict[bytes32, PrivateKey] = {}
# Last time we updated pool_state based on the config file
self.last_config_access_time: uint64 = uint64(0)
self.harvester_cache: Dict[str, Dict[str, HarvesterCacheEntry]] = {}
return True
async def _start(self):
@ -415,6 +422,7 @@ class Farmer:
async def update_pool_state(self):
config = load_config(self._root_path, "config.yaml")
pool_config_list: List[PoolWalletConfig] = load_pool_config(self._root_path)
for pool_config in pool_config_list:
p2_singleton_puzzle_hash = pool_config.p2_singleton_puzzle_hash
@ -425,6 +433,9 @@ class Farmer:
if authentication_sk is None:
self.log.error(f"Could not find authentication sk for {p2_singleton_puzzle_hash}")
continue
add_auth_key(self._root_path, pool_config, authentication_sk.get_g1())
if p2_singleton_puzzle_hash not in self.pool_state:
self.pool_state[p2_singleton_puzzle_hash] = {
"points_found_since_start": 0,

View File

@ -7,10 +7,13 @@ import zstd
from chia.consensus.block_record import BlockRecord
from chia.types.blockchain_format.sized_bytes import bytes32
from chia.types.full_block import FullBlock
from chia.types.blockchain_format.program import SerializedProgram
from chia.types.weight_proof import SubEpochChallengeSegment, SubEpochSegments
from chia.util.errors import Err
from chia.util.db_wrapper import DBWrapper
from chia.util.ints import uint32
from chia.util.lru_cache import LRUCache
from chia.util.full_block_utils import generator_from_block
log = logging.getLogger(__name__)
@ -52,6 +55,8 @@ class BlockStore:
# peak. The "key" field is there to make update statements simple
await self.db.execute("CREATE TABLE IF NOT EXISTS current_peak(key int PRIMARY KEY, hash blob)")
# If any of these indices are altered, they should also be altered
# in the chia/cmds/db_upgrade.py file
await self.db.execute("CREATE INDEX IF NOT EXISTS height on full_blocks(height)")
# Sub epoch segments for weight proofs
@ -61,6 +66,8 @@ class BlockStore:
"challenge_segments blob)"
)
# If any of these indices are altered, they should also be altered
# in the chia/cmds/db_upgrade.py file
await self.db.execute(
"CREATE INDEX IF NOT EXISTS is_fully_compactified ON"
" full_blocks(is_fully_compactified, in_main_chain) WHERE in_main_chain=1"
@ -97,7 +104,7 @@ class BlockStore:
await self.db.execute("CREATE INDEX IF NOT EXISTS height on block_records(height)")
await self.db.execute("CREATE INDEX IF NOT EXISTS peak on block_records(is_peak) where is_peak = 1")
await self.db.execute("CREATE INDEX IF NOT EXISTS peak on block_records(is_peak)")
await self.db.commit()
self.block_cache = LRUCache(1000)
@ -294,6 +301,64 @@ class BlockStore:
ret.append(self.maybe_decompress(row[0]))
return ret
async def get_generator(self, header_hash: bytes32) -> Optional[SerializedProgram]:
cached = self.block_cache.get(header_hash)
if cached is not None:
log.debug(f"cache hit for block {header_hash.hex()}")
return cached.transactions_generator
formatted_str = "SELECT block, height from full_blocks WHERE header_hash=?"
async with self.db.execute(formatted_str, (self.maybe_to_hex(header_hash),)) as cursor:
row = await cursor.fetchone()
if row is None:
return None
if self.db_wrapper.db_version == 2:
block_bytes = zstd.decompress(row[0])
else:
block_bytes = row[0]
try:
return generator_from_block(block_bytes)
except Exception as e:
log.error(f"cheap parser failed for block at height {row[1]}: {e}")
# this is defensive, on the off-chance that
# generator_from_block() fails, fall back to the reliable
# definition of parsing a block
b = FullBlock.from_bytes(block_bytes)
return b.transactions_generator
async def get_generators_at(self, heights: List[uint32]) -> List[SerializedProgram]:
assert self.db_wrapper.db_version == 2
if len(heights) == 0:
return []
generators: Dict[uint32, SerializedProgram] = {}
heights_db = tuple(heights)
formatted_str = (
f"SELECT block, height from full_blocks "
f'WHERE in_main_chain=1 AND height in ({"?," * (len(heights_db) - 1)}?)'
)
async with self.db.execute(formatted_str, heights_db) as cursor:
async for row in cursor:
block_bytes = zstd.decompress(row[0])
try:
gen = generator_from_block(block_bytes)
except Exception as e:
log.error(f"cheap parser failed for block at height {row[1]}: {e}")
# this is defensive, on the off-chance that
# generator_from_block() fails, fall back to the reliable
# definition of parsing a block
b = FullBlock.from_bytes(block_bytes)
gen = b.transactions_generator
if gen is None:
raise ValueError(Err.GENERATOR_REF_HAS_NO_GENERATOR)
generators[uint32(row[1])] = gen
return [generators[h] for h in heights]
async def get_block_records_by_hash(self, header_hashes: List[bytes32]):
"""
Returns a list of Block Records, ordered by the same order in which header_hashes are passed in.

View File

@ -7,11 +7,14 @@ from chia.types.coin_record import CoinRecord
from chia.util.db_wrapper import DBWrapper
from chia.util.ints import uint32, uint64
from chia.util.lru_cache import LRUCache
from time import time
from chia.util.chunks import chunks
import time
import logging
log = logging.getLogger(__name__)
MAX_SQLITE_PARAMETERS = 900
class CoinStore:
"""
@ -114,7 +117,7 @@ class CoinStore:
Returns a list of the CoinRecords that were added by this block
"""
start = time()
start = time.monotonic()
additions = []
@ -146,10 +149,10 @@ class CoinStore:
await self._add_coin_records(additions)
await self._set_spent(tx_removals, height)
end = time()
end = time.monotonic()
log.log(
logging.WARNING if end - start > 10 else logging.DEBUG,
f"It took {end - start:0.2f}s to apply {len(tx_additions)} additions and "
f"Height {height}: It took {end - start:0.2f}s to apply {len(tx_additions)} additions and "
+ f"{len(tx_removals)} removals to the coin store. Make sure "
+ "blockchain database is on a fast drive",
)
@ -305,31 +308,31 @@ class CoinStore:
self,
include_spent_coins: bool,
puzzle_hashes: List[bytes32],
start_height: uint32 = uint32(0),
end_height: uint32 = uint32((2 ** 32) - 1),
min_height: uint32 = uint32(0),
) -> List[CoinState]:
if len(puzzle_hashes) == 0:
return []
coins = set()
puzzle_hashes_db: Tuple[Any, ...]
if self.db_wrapper.db_version == 2:
puzzle_hashes_db = tuple(puzzle_hashes)
else:
puzzle_hashes_db = tuple([ph.hex() for ph in puzzle_hashes])
async with self.coin_record_db.execute(
f"SELECT confirmed_index, spent_index, coinbase, puzzle_hash, "
f"coin_parent, amount, timestamp FROM coin_record INDEXED BY coin_puzzle_hash "
f'WHERE puzzle_hash in ({"?," * (len(puzzle_hashes) - 1)}?) '
f"AND confirmed_index>=? AND confirmed_index<? "
f"{'' if include_spent_coins else 'AND spent_index=0'}",
puzzle_hashes_db + (start_height, end_height),
) as cursor:
for puzzles in chunks(puzzle_hashes, MAX_SQLITE_PARAMETERS):
puzzle_hashes_db: Tuple[Any, ...]
if self.db_wrapper.db_version == 2:
puzzle_hashes_db = tuple(puzzles)
else:
puzzle_hashes_db = tuple([ph.hex() for ph in puzzles])
async with self.coin_record_db.execute(
f"SELECT confirmed_index, spent_index, coinbase, puzzle_hash, "
f"coin_parent, amount, timestamp FROM coin_record INDEXED BY coin_puzzle_hash "
f'WHERE puzzle_hash in ({"?," * (len(puzzles) - 1)}?) '
f"AND (confirmed_index>=? OR spent_index>=?)"
f"{'' if include_spent_coins else 'AND spent_index=0'}",
puzzle_hashes_db + (min_height, min_height),
) as cursor:
for row in await cursor.fetchall():
coins.add(self.row_to_coin_state(row))
async for row in cursor:
coins.add(self.row_to_coin_state(row))
return list(coins)
return list(coins)
async def get_coin_records_by_parent_ids(
self,
@ -342,51 +345,51 @@ class CoinStore:
return []
coins = set()
parent_ids_db: Tuple[Any, ...]
if self.db_wrapper.db_version == 2:
parent_ids_db = tuple(parent_ids)
else:
parent_ids_db = tuple([pid.hex() for pid in parent_ids])
async with self.coin_record_db.execute(
f"SELECT confirmed_index, spent_index, coinbase, puzzle_hash, "
f'coin_parent, amount, timestamp FROM coin_record WHERE coin_parent in ({"?," * (len(parent_ids) - 1)}?) '
f"AND confirmed_index>=? AND confirmed_index<? "
f"{'' if include_spent_coins else 'AND spent_index=0'}",
parent_ids_db + (start_height, end_height),
) as cursor:
for ids in chunks(parent_ids, MAX_SQLITE_PARAMETERS):
parent_ids_db: Tuple[Any, ...]
if self.db_wrapper.db_version == 2:
parent_ids_db = tuple(ids)
else:
parent_ids_db = tuple([pid.hex() for pid in ids])
async with self.coin_record_db.execute(
f"SELECT confirmed_index, spent_index, coinbase, puzzle_hash, "
f'coin_parent, amount, timestamp FROM coin_record WHERE coin_parent in ({"?," * (len(ids) - 1)}?) '
f"AND confirmed_index>=? AND confirmed_index<? "
f"{'' if include_spent_coins else 'AND spent_index=0'}",
parent_ids_db + (start_height, end_height),
) as cursor:
for row in await cursor.fetchall():
coin = self.row_to_coin(row)
coins.add(CoinRecord(coin, row[0], row[1], row[2], row[6]))
return list(coins)
async for row in cursor:
coin = self.row_to_coin(row)
coins.add(CoinRecord(coin, row[0], row[1], row[2], row[6]))
return list(coins)
async def get_coin_state_by_ids(
async def get_coin_states_by_ids(
self,
include_spent_coins: bool,
coin_ids: List[bytes32],
start_height: uint32 = uint32(0),
end_height: uint32 = uint32((2 ** 32) - 1),
min_height: uint32 = uint32(0),
) -> List[CoinState]:
if len(coin_ids) == 0:
return []
coins = set()
coin_ids_db: Tuple[Any, ...]
if self.db_wrapper.db_version == 2:
coin_ids_db = tuple(coin_ids)
else:
coin_ids_db = tuple([pid.hex() for pid in coin_ids])
async with self.coin_record_db.execute(
f"SELECT confirmed_index, spent_index, coinbase, puzzle_hash, "
f'coin_parent, amount, timestamp FROM coin_record WHERE coin_name in ({"?," * (len(coin_ids) - 1)}?) '
f"AND confirmed_index>=? AND confirmed_index<? "
f"{'' if include_spent_coins else 'AND spent_index=0'}",
coin_ids_db + (start_height, end_height),
) as cursor:
for row in await cursor.fetchall():
coins.add(self.row_to_coin_state(row))
return list(coins)
for ids in chunks(coin_ids, MAX_SQLITE_PARAMETERS):
coin_ids_db: Tuple[Any, ...]
if self.db_wrapper.db_version == 2:
coin_ids_db = tuple(ids)
else:
coin_ids_db = tuple([pid.hex() for pid in ids])
async with self.coin_record_db.execute(
f"SELECT confirmed_index, spent_index, coinbase, puzzle_hash, "
f'coin_parent, amount, timestamp FROM coin_record WHERE coin_name in ({"?," * (len(ids) - 1)}?) '
f"AND (confirmed_index>=? OR spent_index>=?)"
f"{'' if include_spent_coins else 'AND spent_index=0'}",
coin_ids_db + (min_height, min_height),
) as cursor:
async for row in cursor:
coins.add(self.row_to_coin_state(row))
return list(coins)
async def rollback_to_block(self, block_index: int) -> List[CoinRecord]:
"""

View File

@ -1,4 +1,5 @@
import asyncio
import contextlib
import dataclasses
import logging
import random
@ -729,9 +730,9 @@ class FullNode:
self._transaction_queue_task.cancel()
if hasattr(self, "_blockchain_lock_queue"):
self._blockchain_lock_queue.close()
cancel_task_safe(task=self._sync_task, log=self.log)
async def _await_closed(self):
cancel_task_safe(self._sync_task, self.log)
for task_id, task in list(self.full_node_store.tx_fetch_tasks.items()):
cancel_task_safe(task, self.log)
await self.connection.close()
@ -739,6 +740,9 @@ class FullNode:
await asyncio.wait([self._init_weight_proof])
if hasattr(self, "_blockchain_lock_queue"):
await self._blockchain_lock_queue.await_closed()
if self._sync_task is not None:
with contextlib.suppress(asyncio.CancelledError):
await self._sync_task
async def _sync(self):
"""
@ -939,10 +943,10 @@ class FullNode:
await peer.close(600)
raise ValueError(f"Failed to validate block batch {start_height} to {end_height}")
self.log.info(f"Added blocks {start_height} to {end_height}")
await self.send_peak_to_wallets()
peak = self.blockchain.get_peak()
if len(coin_states) > 0 and fork_height is not None:
await self.update_wallets(peak.height, fork_height, peak.header_hash, coin_states)
await self.send_peak_to_wallets()
self.blockchain.clean_block_record(end_height - self.constants.BLOCKS_CACHE_SIZE)
loop = asyncio.get_event_loop()
@ -1047,15 +1051,18 @@ class FullNode:
# Validates signatures in multiprocessing since they take a while, and we don't have cached transactions
# for these blocks (unlike during normal operation where we validate one at a time)
pre_validate_start = time.time()
pre_validate_start = time.monotonic()
pre_validation_results: List[PreValidationResult] = await self.blockchain.pre_validate_blocks_multiprocessing(
blocks_to_validate, {}, wp_summaries=wp_summaries, validate_signatures=True
)
pre_validate_end = time.time()
if pre_validate_end - pre_validate_start > 10:
self.log.warning(f"Block pre-validation time: {pre_validate_end - pre_validate_start:0.2f} seconds")
else:
self.log.debug(f"Block pre-validation time: {pre_validate_end - pre_validate_start:0.2f} seconds")
pre_validate_end = time.monotonic()
pre_validate_time = pre_validate_end - pre_validate_start
self.log.log(
logging.WARNING if pre_validate_time > 10 else logging.DEBUG,
f"Block pre-validation time: {pre_validate_end - pre_validate_start:0.2f} seconds "
f"({len(blocks_to_validate)} blocks, start height: {blocks_to_validate[0].height})",
)
for i, block in enumerate(blocks_to_validate):
if pre_validation_results[i].error is not None:
self.log.error(
@ -1203,6 +1210,8 @@ class FullNode:
msg = make_msg(ProtocolMessageTypes.new_signage_point, broadcast_farmer)
await self.server.send_to_all([msg], NodeType.FARMER)
self._state_changed("signage_point", {"broadcast_farmer": broadcast_farmer})
async def peak_post_processing(
self,
block: FullBlock,
@ -1553,7 +1562,7 @@ class FullNode:
f"Block validation time: {validation_time:0.2f} seconds, "
f"pre_validation time: {pre_validation_time:0.2f} seconds, "
f"cost: {block.transactions_info.cost if block.transactions_info is not None else 'None'}"
f"{percent_full_str}",
f"{percent_full_str} header_hash: {header_hash} height: {block.height}",
)
# This code path is reached if added == ADDED_AS_ORPHAN or NEW_TIP

View File

@ -1436,12 +1436,12 @@ class FullNodeAPI:
# Send all coins with requested puzzle hash that have been created after the specified height
states: List[CoinState] = await self.full_node.coin_store.get_coin_states_by_puzzle_hashes(
include_spent_coins=True, puzzle_hashes=request.puzzle_hashes, start_height=request.min_height
include_spent_coins=True, puzzle_hashes=request.puzzle_hashes, min_height=request.min_height
)
if len(hint_coin_ids) > 0:
hint_states = await self.full_node.coin_store.get_coin_state_by_ids(
include_spent_coins=True, coin_ids=hint_coin_ids, start_height=request.min_height
hint_states = await self.full_node.coin_store.get_coin_states_by_ids(
include_spent_coins=True, coin_ids=hint_coin_ids, min_height=request.min_height
)
states.extend(hint_states)
@ -1471,8 +1471,8 @@ class FullNodeAPI:
self.full_node.peer_coin_ids[peer.peer_node_id].add(coin_id)
self.full_node.peer_sub_counter[peer.peer_node_id] += 1
states: List[CoinState] = await self.full_node.coin_store.get_coin_state_by_ids(
include_spent_coins=True, coin_ids=request.coin_ids, start_height=request.min_height
states: List[CoinState] = await self.full_node.coin_store.get_coin_states_by_ids(
include_spent_coins=True, coin_ids=request.coin_ids, min_height=request.min_height
)
response = wallet_protocol.RespondToCoinUpdates(request.coin_ids, request.min_height, states)

View File

@ -38,7 +38,7 @@ class HintStore:
async def add_hints(self, coin_hint_list: List[Tuple[bytes32, bytes]]) -> None:
if self.db_wrapper.db_version == 2:
cursor = await self.db_wrapper.db.executemany(
"INSERT INTO hints VALUES(?, ?) ON CONFLICT DO NOTHING",
"INSERT OR IGNORE INTO hints VALUES(?, ?)",
coin_hint_list,
)
else:

View File

@ -8,10 +8,10 @@ from chia.consensus.cost_calculator import NPCResult
from chia.consensus.default_constants import DEFAULT_CONSTANTS
from chia.full_node.generator import create_generator_args, setup_generator_args
from chia.types.coin_record import CoinRecord
from chia.types.condition_opcodes import ConditionOpcode
from chia.types.condition_with_args import ConditionWithArgs
from chia.types.generator_types import BlockGenerator
from chia.types.name_puzzle_condition import NPC
from chia.util.condition_tools import ConditionOpcode
from chia.util.errors import Err
from chia.util.ints import uint32, uint64, uint16
from chia.wallet.puzzles.generator_loader import GENERATOR_FOR_SINGLE_COIN_MOD

View File

@ -7,6 +7,7 @@ from concurrent.futures.process import ProcessPoolExecutor
from typing import Dict, List, Optional, Set, Tuple
from blspy import GTElement
from chiabip158 import PyBIP158
from clvm.casts import int_from_bytes
from chia.util import cached_bls
from chia.consensus.block_record import BlockRecord
@ -27,7 +28,6 @@ from chia.types.mempool_inclusion_status import MempoolInclusionStatus
from chia.types.mempool_item import MempoolItem
from chia.types.spend_bundle import SpendBundle
from chia.util.cached_bls import LOCAL_CACHE
from chia.util.clvm import int_from_bytes
from chia.util.condition_tools import pkm_pairs
from chia.util.errors import Err, ValidationError
from chia.util.generator_tools import additions_for_npc
@ -304,7 +304,7 @@ class MempoolManager:
for add in additions:
additions_dict[add.name()] = add
addition_amount = uint64(0)
addition_amount: int = 0
# Check additions for max coin amount
for coin in additions:
if coin.amount < 0:
@ -319,7 +319,7 @@ class MempoolManager:
MempoolInclusionStatus.FAILED,
Err.COIN_AMOUNT_EXCEEDS_MAXIMUM,
)
addition_amount = uint64(addition_amount + coin.amount)
addition_amount = addition_amount + coin.amount
# Check for duplicate outputs
addition_counter = collections.Counter(_.name() for _ in additions)
for k, v in addition_counter.items():
@ -336,7 +336,7 @@ class MempoolManager:
removal_record_dict: Dict[bytes32, CoinRecord] = {}
removal_coin_dict: Dict[bytes32, Coin] = {}
removal_amount = uint64(0)
removal_amount: int = 0
for name in removal_names:
removal_record = await self.coin_store.get_coin_record(name)
if removal_record is None and name not in additions_dict:
@ -359,7 +359,7 @@ class MempoolManager:
)
assert removal_record is not None
removal_amount = uint64(removal_amount + removal_record.coin.amount)
removal_amount = removal_amount + removal_record.coin.amount
removal_record_dict[name] = removal_record
removal_coin_dict[name] = removal_record.coin

View File

@ -1,6 +1,7 @@
import asyncio
import logging
from typing import Dict, List, Optional, Set, Tuple
from collections import OrderedDict as orderedDict
from typing import Dict, List, Optional, OrderedDict, Set, Tuple
from chia.types.blockchain_format.sized_bytes import bytes32
from chia.util.ints import uint32, uint128
@ -12,7 +13,7 @@ class SyncStore:
# Whether or not we are syncing
sync_mode: bool
long_sync: bool
peak_to_peer: Dict[bytes32, Set[bytes32]] # Header hash : peer node id
peak_to_peer: OrderedDict[bytes32, Set[bytes32]] # Header hash : peer node id
peer_to_peak: Dict[bytes32, Tuple[bytes32, uint32, uint128]] # peer node id : [header_hash, height, weight]
sync_target_header_hash: Optional[bytes32] # Peak hash we are syncing towards
sync_target_height: Optional[uint32] # Peak height we are syncing towards
@ -29,7 +30,7 @@ class SyncStore:
self.sync_target_header_hash = None
self.sync_target_height = None
self.peak_fork_point = {}
self.peak_to_peer = {}
self.peak_to_peer = orderedDict()
self.peer_to_peak = {}
self.peers_changed = asyncio.Event()
@ -73,7 +74,12 @@ class SyncStore:
self.peak_to_peer[header_hash].add(peer_id)
else:
self.peak_to_peer[header_hash] = {peer_id}
if len(self.peak_to_peer) > 256: # nice power of two
item = self.peak_to_peer.popitem(last=False) # Remove the oldest entry
# sync target hash is used throughout the sync process and should not be deleted.
if item[0] == self.sync_target_header_hash:
self.peak_to_peer[item[0]] = item[1] # Put it back in if it was the sync target
self.peak_to_peer.popitem(last=False) # Remove the oldest entry again
if new_peak:
self.peer_to_peak[peer_id] = (header_hash, height, weight)
@ -126,7 +132,7 @@ class SyncStore:
"""
Clears the peak_to_peer info which can get quite large.
"""
self.peak_to_peer = {}
self.peak_to_peer = orderedDict()
def peer_disconnected(self, node_id: bytes32):
if node_id in self.peer_to_peak:

View File

@ -5,7 +5,8 @@ import math
import pathlib
import random
from concurrent.futures.process import ProcessPoolExecutor
from typing import Dict, List, Optional, Tuple
import tempfile
from typing import Dict, IO, List, Optional, Tuple
from chia.consensus.block_header_validation import validate_finished_header_block
from chia.consensus.block_record import BlockRecord
@ -19,6 +20,7 @@ from chia.consensus.pot_iterations import (
calculate_sp_iters,
is_overflow_block,
)
from chia.util.chunks import chunks
from chia.consensus.vdf_info_computation import get_signage_point_vdf_info
from chia.types.blockchain_format.classgroup import ClassgroupElement
from chia.types.blockchain_format.sized_bytes import bytes32
@ -43,6 +45,10 @@ from chia.util.streamable import dataclass_from_dict, recurse_jsonify
log = logging.getLogger(__name__)
def _create_shutdown_file() -> IO:
return tempfile.NamedTemporaryFile(prefix="chia_full_node_weight_proof_handler_executor_shutdown_trigger")
class WeightProofHandler:
LAMBDA_L = 100
@ -594,7 +600,13 @@ class WeightProofHandler:
peak_height = weight_proof.recent_chain_data[-1].reward_chain_block.height
log.info(f"validate weight proof peak height {peak_height}")
# TODO: Consider if this can be spun off to a thread as an alternative to
# sprinkling async sleeps around.
# timing reference: start
summaries, sub_epoch_weight_list = _validate_sub_epoch_summaries(self.constants, weight_proof)
await asyncio.sleep(0) # break up otherwise multi-second sync code
# timing reference: 1 second
if summaries is None:
log.error("weight proof failed sub epoch data validation")
return False, uint32(0), []
@ -605,38 +617,65 @@ class WeightProofHandler:
log.error("failed weight proof sub epoch sample validation")
return False, uint32(0), []
executor = ProcessPoolExecutor(self._num_processes)
constants, summary_bytes, wp_segment_bytes, wp_recent_chain_bytes = vars_to_bytes(
self.constants, summaries, weight_proof
)
# timing reference: 1 second
# TODO: Consider implementing an async polling closer for the executor.
with ProcessPoolExecutor(max_workers=self._num_processes) as executor:
# The shutdown file manager must be inside of the executor manager so that
# we request the workers close prior to waiting for them to close.
with _create_shutdown_file() as shutdown_file:
await asyncio.sleep(0) # break up otherwise multi-second sync code
# timing reference: 1.1 second
constants, summary_bytes, wp_segment_bytes, wp_recent_chain_bytes = vars_to_bytes(
self.constants, summaries, weight_proof
)
await asyncio.sleep(0) # break up otherwise multi-second sync code
recent_blocks_validation_task = asyncio.get_running_loop().run_in_executor(
executor, _validate_recent_blocks, constants, wp_recent_chain_bytes, summary_bytes
)
# timing reference: 2 second
recent_blocks_validation_task = asyncio.get_running_loop().run_in_executor(
executor,
_validate_recent_blocks,
constants,
wp_recent_chain_bytes,
summary_bytes,
pathlib.Path(shutdown_file.name),
)
segments_validated, vdfs_to_validate = _validate_sub_epoch_segments(
constants, rng, wp_segment_bytes, summary_bytes
)
if not segments_validated:
return False, uint32(0), []
# timing reference: 2 second
segments_validated, vdfs_to_validate = _validate_sub_epoch_segments(
constants, rng, wp_segment_bytes, summary_bytes
)
await asyncio.sleep(0) # break up otherwise multi-second sync code
if not segments_validated:
return False, uint32(0), []
vdf_chunks = chunks(vdfs_to_validate, self._num_processes)
vdf_tasks = []
for chunk in vdf_chunks:
byte_chunks = []
for vdf_proof, classgroup, vdf_info in chunk:
byte_chunks.append((bytes(vdf_proof), bytes(classgroup), bytes(vdf_info)))
# timing reference: 4 second
vdf_chunks = chunks(vdfs_to_validate, self._num_processes)
vdf_tasks = []
# timing reference: 4 second
for chunk in vdf_chunks:
byte_chunks = []
for vdf_proof, classgroup, vdf_info in chunk:
byte_chunks.append((bytes(vdf_proof), bytes(classgroup), bytes(vdf_info)))
vdf_task = asyncio.get_running_loop().run_in_executor(executor, _validate_vdf_batch, constants, byte_chunks)
vdf_tasks.append(vdf_task)
vdf_task = asyncio.get_running_loop().run_in_executor(
executor,
_validate_vdf_batch,
constants,
byte_chunks,
pathlib.Path(shutdown_file.name),
)
vdf_tasks.append(vdf_task)
# give other stuff a turn
await asyncio.sleep(0)
for vdf_task in vdf_tasks:
validated = await vdf_task
if not validated:
return False, uint32(0), []
# timing reference: 4 second
for vdf_task in asyncio.as_completed(fs=vdf_tasks):
validated = await vdf_task
if not validated:
return False, uint32(0), []
valid_recent_blocks_task = recent_blocks_validation_task
valid_recent_blocks = await valid_recent_blocks_task
valid_recent_blocks_task = recent_blocks_validation_task
valid_recent_blocks = await valid_recent_blocks_task
if not valid_recent_blocks:
log.error("failed validating weight proof recent blocks")
return False, uint32(0), []
@ -835,11 +874,6 @@ def handle_end_of_slot(
)
def chunks(some_list, chunk_size):
chunk_size = max(1, chunk_size)
return (some_list[i : i + chunk_size] for i in range(0, len(some_list), chunk_size))
def compress_segments(full_segment_index, segments: List[SubEpochChallengeSegment]) -> List[SubEpochChallengeSegment]:
compressed_segments = []
compressed_segments.append(segments[0])
@ -1294,10 +1328,20 @@ def validate_recent_blocks(
return True, [bytes(sub) for sub in sub_blocks._block_records.values()]
def _validate_recent_blocks(constants_dict: Dict, recent_chain_bytes: bytes, summaries_bytes: List[bytes]) -> bool:
def _validate_recent_blocks(
constants_dict: Dict,
recent_chain_bytes: bytes,
summaries_bytes: List[bytes],
shutdown_file_path: Optional[pathlib.Path] = None,
) -> bool:
constants, summaries = bytes_to_vars(constants_dict, summaries_bytes)
recent_chain: RecentChainData = RecentChainData.from_bytes(recent_chain_bytes)
success, records = validate_recent_blocks(constants, recent_chain, summaries)
success, records = validate_recent_blocks(
constants=constants,
recent_chain=recent_chain,
summaries=summaries,
shutdown_file_path=shutdown_file_path,
)
return success
@ -1309,7 +1353,12 @@ def _validate_recent_blocks_and_get_records(
) -> Tuple[bool, List[bytes]]:
constants, summaries = bytes_to_vars(constants_dict, summaries_bytes)
recent_chain: RecentChainData = RecentChainData.from_bytes(recent_chain_bytes)
return validate_recent_blocks(constants, recent_chain, summaries, shutdown_file_path)
return validate_recent_blocks(
constants=constants,
recent_chain=recent_chain,
summaries=summaries,
shutdown_file_path=shutdown_file_path,
)
def _validate_pospace_recent_chain(

View File

@ -45,6 +45,7 @@ def get_madmax_install_info(plotters_root_path: Path) -> Optional[Dict[str, Any]
supported: bool = is_madmax_supported()
if get_madmax_executable_path_for_ksize(plotters_root_path).exists():
version = None
try:
proc = run_command(
[os.fspath(get_madmax_executable_path_for_ksize(plotters_root_path)), "--version"],
@ -54,7 +55,8 @@ def get_madmax_install_info(plotters_root_path: Path) -> Optional[Dict[str, Any]
)
version = proc.stdout.strip()
except Exception as e:
print(f"Failed to determine madmax version: {e}")
tb = traceback.format_exc()
log.error(f"Failed to determine madmax version: {e} {tb}")
if version is not None:
installed = True

View File

@ -58,6 +58,25 @@ def load_pool_config(root_path: Path) -> List[PoolWalletConfig]:
return ret_list
# TODO: remove this a few versions after 1.3, since authentication_public_key is deprecated. This is here to support
# downgrading to versions older than 1.3.
def add_auth_key(root_path: Path, config_entry: PoolWalletConfig, auth_key: G1Element):
config = load_config(root_path, "config.yaml")
pool_list = config["pool"].get("pool_list", [])
if pool_list is not None:
for pool_config_dict in pool_list:
try:
if (
G1Element.from_bytes(hexstr_to_bytes(pool_config_dict["owner_public_key"]))
== config_entry.owner_public_key
):
pool_config_dict["authentication_public_key"] = bytes(auth_key).hex()
except Exception as e:
log.error(f"Exception updating config: {pool_config_dict} {e}")
config["pool"]["pool_list"] = pool_list
save_config(root_path, "config.yaml", config)
async def update_pool_config(root_path: Path, pool_config_list: List[PoolWalletConfig]):
full_config = load_config(root_path, "config.yaml")
full_config["pool"]["pool_list"] = [c.to_json_dict() for c in pool_config_list]

View File

@ -1,3 +1,4 @@
import dataclasses
import logging
import time
from typing import Any, Optional, Set, Tuple, List, Dict
@ -350,7 +351,7 @@ class PoolWallet:
p2_puzzle_hash: bytes32 = (await self.get_current_state()).p2_singleton_puzzle_hash
await self.wallet_state_manager.add_new_wallet(self, self.wallet_info.id, create_puzzle_hashes=False)
await self.wallet_state_manager.add_interested_puzzle_hash(p2_puzzle_hash, self.wallet_id, False)
await self.wallet_state_manager.add_interested_puzzle_hashes([p2_puzzle_hash], [self.wallet_id], False)
return self
@staticmethod
@ -404,7 +405,7 @@ class PoolWallet:
balance = await standard_wallet.get_confirmed_balance(unspent_records)
if balance < PoolWallet.MINIMUM_INITIAL_BALANCE:
raise ValueError("Not enough balance in main wallet to create a managed plotting pool.")
if balance < fee:
if balance < PoolWallet.MINIMUM_INITIAL_BALANCE + fee:
raise ValueError("Not enough balance in main wallet to create a managed plotting pool with fee {fee}.")
# Verify Parameters - raise if invalid
@ -413,6 +414,7 @@ class PoolWallet:
spend_bundle, singleton_puzzle_hash, launcher_coin_id = await PoolWallet.generate_launcher_spend(
standard_wallet,
uint64(1),
fee,
initial_target_state,
wallet_state_manager.constants.GENESIS_CHALLENGE,
p2_singleton_delay_time,
@ -459,9 +461,15 @@ class PoolWallet:
async def sign(self, coin_spend: CoinSpend) -> SpendBundle:
async def pk_to_sk(pk: G1Element) -> PrivateKey:
sk, _ = await self._get_owner_key_cache()
assert sk.get_g1() == pk
return sk
s = find_owner_sk([self.wallet_state_manager.private_key], pk)
if s is None:
return self.standard_wallet.secret_key_store.secret_key_for_public_key(pk)
else:
# Note that pool_wallet_index may be from another wallet than self.wallet_id
owner_sk, pool_wallet_index = s
if owner_sk is None:
return self.standard_wallet.secret_key_store.secret_key_for_public_key(pk)
return owner_sk
return await sign_coin_spends(
[coin_spend],
@ -470,7 +478,25 @@ class PoolWallet:
self.wallet_state_manager.constants.MAX_BLOCK_COST_CLVM,
)
async def generate_travel_transaction(self, fee: uint64) -> TransactionRecord:
async def generate_fee_transaction(self, fee: uint64, coin_announcements=None) -> TransactionRecord:
fee_tx = await self.standard_wallet.generate_signed_transaction(
uint64(0),
(await self.standard_wallet.get_new_puzzlehash()),
fee=fee,
origin_id=None,
coins=None,
primaries=None,
ignore_max_send_amount=False,
coin_announcements_to_consume=coin_announcements,
)
return fee_tx
async def publish_transactions(self, travel_tx: TransactionRecord, fee_tx: Optional[TransactionRecord]):
await self.wallet_state_manager.add_pending_transaction(travel_tx)
if fee_tx is not None:
await self.wallet_state_manager.add_pending_transaction(dataclasses.replace(fee_tx, spend_bundle=None))
async def generate_travel_transactions(self, fee: uint64) -> Tuple[TransactionRecord, Optional[TransactionRecord]]:
# target_state is contained within pool_wallet_state
pool_wallet_info: PoolWalletInfo = await self.get_current_state()
@ -542,8 +568,11 @@ class PoolWallet:
else:
raise RuntimeError("Invalid state")
signed_spend_bundle = await self.sign(outgoing_coin_spend)
fee_tx = None
if fee > 0:
fee_tx = await self.generate_fee_transaction(fee)
signed_spend_bundle = await self.sign(outgoing_coin_spend)
assert signed_spend_bundle.removals()[0].puzzle_hash == singleton.puzzle_hash
assert signed_spend_bundle.removals()[0].name() == singleton.name()
assert signed_spend_bundle is not None
@ -566,12 +595,15 @@ class PoolWallet:
type=uint32(TransactionType.OUTGOING_TX.value),
name=signed_spend_bundle.name(),
)
return tx_record
await self.publish_transactions(tx_record, fee_tx)
return tx_record, fee_tx
@staticmethod
async def generate_launcher_spend(
standard_wallet: Wallet,
amount: uint64,
fee: uint64,
initial_target_state: PoolState,
genesis_challenge: bytes32,
delay_time: uint64,
@ -581,12 +613,10 @@ class PoolWallet:
Creates the initial singleton, which includes spending an origin coin, the launcher, and creating a singleton
with the "pooling" inner state, which can be either self pooling or using a pool
"""
coins: Set[Coin] = await standard_wallet.select_coins(amount)
coins: Set[Coin] = await standard_wallet.select_coins(uint64(amount + fee))
if coins is None:
raise ValueError("Not enough coins to create pool wallet")
assert len(coins) == 1
launcher_parent: Coin = coins.copy().pop()
genesis_launcher_puz: Program = SINGLETON_LAUNCHER
launcher_coin: Coin = Coin(launcher_parent.name(), genesis_launcher_puz.get_tree_hash(), amount)
@ -629,8 +659,8 @@ class PoolWallet:
create_launcher_tx_record: Optional[TransactionRecord] = await standard_wallet.generate_signed_transaction(
amount,
genesis_launcher_puz.get_tree_hash(),
uint64(0),
None,
fee,
launcher_parent.name(),
coins,
None,
False,
@ -651,7 +681,9 @@ class PoolWallet:
full_spend: SpendBundle = SpendBundle.aggregate([create_launcher_tx_record.spend_bundle, launcher_sb])
return full_spend, puzzle_hash, launcher_coin.name()
async def join_pool(self, target_state: PoolState, fee: uint64) -> Tuple[uint64, TransactionRecord]:
async def join_pool(
self, target_state: PoolState, fee: uint64
) -> Tuple[uint64, TransactionRecord, Optional[TransactionRecord]]:
if target_state.state != FARMING_TO_POOL:
raise ValueError(f"join_pool must be called with target_state={FARMING_TO_POOL} (FARMING_TO_POOL)")
if self.target_state is not None:
@ -689,12 +721,10 @@ class PoolWallet:
self.target_state = target_state
self.next_transaction_fee = fee
tx_record: TransactionRecord = await self.generate_travel_transaction(fee)
await self.wallet_state_manager.add_pending_transaction(tx_record)
travel_tx, fee_tx = await self.generate_travel_transactions(fee)
return total_fee, travel_tx, fee_tx
return total_fee, tx_record
async def self_pool(self, fee: uint64) -> Tuple[uint64, TransactionRecord]:
async def self_pool(self, fee: uint64) -> Tuple[uint64, TransactionRecord, Optional[TransactionRecord]]:
if await self.have_unconfirmed_transaction():
raise ValueError(
"Cannot self pool due to unconfirmed transaction. If this is stuck, delete the unconfirmed transaction."
@ -725,11 +755,10 @@ class PoolWallet:
SELF_POOLING, owner_puzzlehash, owner_pubkey, pool_url=None, relative_lock_height=uint32(0)
)
self.next_transaction_fee = fee
tx_record = await self.generate_travel_transaction(fee)
await self.wallet_state_manager.add_pending_transaction(tx_record)
return total_fee, tx_record
travel_tx, fee_tx = await self.generate_travel_transactions(fee)
return total_fee, travel_tx, fee_tx
async def claim_pool_rewards(self, fee: uint64) -> TransactionRecord:
async def claim_pool_rewards(self, fee: uint64) -> Tuple[TransactionRecord, Optional[TransactionRecord]]:
# Search for p2_puzzle_hash coins, and spend them with the singleton
if await self.have_unconfirmed_transaction():
raise ValueError(
@ -757,9 +786,12 @@ class PoolWallet:
all_spends: List[CoinSpend] = []
total_amount = 0
current_coin_record = None
for coin_record in unspent_coin_records:
if coin_record.coin not in coin_to_height_farmed:
continue
current_coin_record = coin_record
if len(all_spends) >= 100:
# Limit the total number of spends, so it fits into the block
break
@ -778,32 +810,44 @@ class PoolWallet:
self.log.info(
f"Farmer coin: {coin_record.coin} {coin_record.coin.name()} {coin_to_height_farmed[coin_record.coin]}"
)
if len(all_spends) == 0:
if len(all_spends) == 0 or current_coin_record is None:
raise ValueError("Nothing to claim, no unspent coinbase rewards")
# No signatures are required to absorb
spend_bundle: SpendBundle = SpendBundle(all_spends, G2Element())
claim_spend: SpendBundle = SpendBundle(all_spends, G2Element())
# If fee is 0, no signatures are required to absorb
full_spend: SpendBundle = claim_spend
fee_tx = None
if fee > 0:
absorb_announce = Announcement(current_coin_record.coin.name(), b"$")
fee_tx = await self.generate_fee_transaction(fee, coin_announcements=[absorb_announce])
full_spend = SpendBundle.aggregate([fee_tx.spend_bundle, claim_spend])
assert full_spend.fees() == fee
current_time = uint64(int(time.time()))
# The claim spend, minus the fee amount from the main wallet
absorb_transaction: TransactionRecord = TransactionRecord(
confirmed_at_height=uint32(0),
created_at_time=uint64(int(time.time())),
created_at_time=current_time,
to_puzzle_hash=current_state.current.target_puzzle_hash,
amount=uint64(total_amount),
fee_amount=fee,
fee_amount=fee, # This will not be double counted in self.standard_wallet
confirmed=False,
sent=uint32(0),
spend_bundle=spend_bundle,
additions=spend_bundle.additions(),
removals=spend_bundle.removals(),
spend_bundle=full_spend,
additions=full_spend.additions(),
removals=full_spend.removals(),
wallet_id=uint32(self.wallet_id),
sent_to=[],
memos=[],
trade_id=None,
type=uint32(TransactionType.OUTGOING_TX.value),
name=spend_bundle.name(),
name=full_spend.name(),
)
await self.wallet_state_manager.add_pending_transaction(absorb_transaction)
return absorb_transaction
await self.publish_transactions(absorb_transaction, fee_tx)
return absorb_transaction, fee_tx
async def new_peak(self, peak_height: uint64) -> None:
# This gets called from the WalletStateManager whenever there is a new peak
@ -847,8 +891,7 @@ class PoolWallet:
assert self.target_state.relative_lock_height >= self.MINIMUM_RELATIVE_LOCK_HEIGHT
assert self.target_state.pool_url is not None
tx_record = await self.generate_travel_transaction(self.next_transaction_fee)
await self.wallet_state_manager.add_pending_transaction(tx_record)
await self.generate_travel_transactions(self.next_transaction_fee)
async def have_unconfirmed_transaction(self) -> bool:
unconfirmed: List[TransactionRecord] = await self.wallet_state_manager.tx_store.get_unconfirmed_for_wallet(

View File

@ -1,6 +1,6 @@
from dataclasses import dataclass
from enum import IntEnum
from typing import Optional, Dict
from typing import Optional, Dict, Any
from blspy import G1Element
@ -10,7 +10,7 @@ from chia.types.blockchain_format.program import Program
from chia.types.blockchain_format.sized_bytes import bytes32
from chia.util.byte_types import hexstr_to_bytes
from chia.util.ints import uint32, uint8
from chia.util.streamable import streamable, Streamable
from chia.util.streamable import streamable, Streamable, dataclass_from_dict
class PoolSingletonState(IntEnum):
@ -113,3 +113,7 @@ class PoolWalletInfo(Streamable):
current_inner: Program # Inner puzzle in current singleton, not revealed yet
tip_singleton_coin_id: bytes32
singleton_block_height: uint32 # Block height that current PoolState is from
@classmethod
def from_json_dict(cls: Any, json_dict: Dict) -> Any:
return dataclass_from_dict(cls, json_dict)

View File

@ -30,6 +30,7 @@ VALID_REPLY_MESSAGE_MAP = {
pmt.request_signage_point_or_end_of_sub_slot: [pmt.respond_signage_point, pmt.respond_end_of_sub_slot],
pmt.request_compact_vdf: [pmt.respond_compact_vdf],
pmt.request_peers: [pmt.respond_peers],
pmt.request_header_blocks: [pmt.respond_header_blocks, pmt.reject_header_blocks],
}

View File

@ -0,0 +1,72 @@
import ipaddress
from typing import Any, Callable, Dict, List, Optional
from chia.seeder.crawler import Crawler
from chia.util.ws_message import WsRpcMessage, create_payload_dict
class CrawlerRpcApi:
def __init__(self, crawler: Crawler):
self.service = crawler
self.service_name = "chia_crawler"
def get_routes(self) -> Dict[str, Callable]:
return {
"/get_peer_counts": self.get_peer_counts,
"/get_ips_after_timestamp": self.get_ips_after_timestamp,
}
async def _state_changed(self, change: str, change_data: Optional[Dict[str, Any]] = None) -> List[WsRpcMessage]:
payloads = []
if change_data is None:
change_data = await self.get_peer_counts({})
if change in ("crawl_batch_completed", "loaded_initial_peers"):
payloads.append(create_payload_dict(change, change_data, self.service_name, "metrics"))
return payloads
async def get_peer_counts(self, _request: Dict) -> Dict[str, Any]:
ipv6_addresses_count = 0
for host in self.service.best_timestamp_per_peer.keys():
try:
ipaddress.IPv6Address(host)
ipv6_addresses_count += 1
except ipaddress.AddressValueError:
continue
reliable_peers = 0
if self.service.crawl_store is not None:
reliable_peers = self.service.crawl_store.get_reliable_peers()
data = {
"peer_counts": {
"total_last_5_days": len(self.service.best_timestamp_per_peer),
"reliable_nodes": reliable_peers,
"ipv4_last_5_days": len(self.service.best_timestamp_per_peer) - ipv6_addresses_count,
"ipv6_last_5_days": ipv6_addresses_count,
"versions": self.service.versions,
}
}
return data
async def get_ips_after_timestamp(self, _request: Dict) -> Dict[str, Any]:
after = _request.get("after", None)
if after is None:
raise ValueError("`after` is required and must be a unix timestamp")
offset = _request.get("offset", 0)
limit = _request.get("limit", 10000)
matched_ips: List[str] = []
for ip, timestamp in self.service.best_timestamp_per_peer.items():
if timestamp > after:
matched_ips.append(ip)
matched_ips.sort()
return {
"ips": matched_ips[offset : (offset + limit)],
"total": len(matched_ips),
}

View File

@ -86,13 +86,11 @@ class FullNodeRpcApi:
"metrics",
)
)
return payloads
if change == "block":
payloads.append(create_payload_dict("block", change_data, self.service_name, "metrics"))
return payloads
if change in ("block", "signage_point"):
payloads.append(create_payload_dict(change, change_data, self.service_name, "metrics"))
return []
return payloads
# this function is just here for backwards-compatibility. It will probably
# be removed in the future
@ -123,6 +121,7 @@ class FullNodeRpcApi:
"mempool_min_fees": {
"cost_5000000": 0,
},
"mempool_max_total_cost": 0,
"block_max_cost": 0,
"node_id": node_id,
},
@ -171,10 +170,12 @@ class FullNodeRpcApi:
mempool_size = len(self.service.mempool_manager.mempool.spends)
mempool_cost = self.service.mempool_manager.mempool.total_mempool_cost
mempool_min_fee_5m = self.service.mempool_manager.mempool.get_min_fee_rate(5000000)
mempool_max_total_cost = self.service.mempool_manager.mempool_max_total_cost
else:
mempool_size = 0
mempool_cost = 0
mempool_min_fee_5m = 0
mempool_max_total_cost = 0
if self.service.server is not None:
is_connected = len(self.service.server.get_full_node_connections()) > 0
else:
@ -202,6 +203,7 @@ class FullNodeRpcApi:
# This Dict sets us up for that in the future
"cost_5000000": mempool_min_fee_5m,
},
"mempool_max_total_cost": mempool_max_total_cost,
"block_max_cost": self.service.constants.MAX_BLOCK_COST_CLVM,
"node_id": node_id,
},

View File

@ -0,0 +1,24 @@
from typing import Any, Callable, Dict, List, Optional
from chia.timelord.timelord import Timelord
from chia.util.ws_message import WsRpcMessage, create_payload_dict
class TimelordRpcApi:
def __init__(self, timelord: Timelord):
self.service = timelord
self.service_name = "chia_timelord"
def get_routes(self) -> Dict[str, Callable]:
return {}
async def _state_changed(self, change: str, change_data: Optional[Dict[str, Any]] = None) -> List[WsRpcMessage]:
payloads = []
if change_data is None:
change_data = {}
if change in ("finished_pot", "new_compact_proof", "skipping_peak", "new_peak"):
payloads.append(create_payload_dict(change, change_data, self.service_name, "metrics"))
return payloads

View File

@ -136,12 +136,13 @@ class WalletRpcApi:
Called by the WalletNode or WalletStateManager when something has changed in the wallet. This
gives us an opportunity to send notifications to all connected clients via WebSocket.
"""
payloads = []
if args[0] is not None and args[0] == "sync_changed":
# Metrics is the only current consumer for this event
return [create_payload_dict(args[0], {}, self.service_name, "metrics")]
payloads.append(create_payload_dict(args[0], {}, self.service_name, "metrics"))
if len(args) < 2:
return []
return payloads
data = {
"state": args[0],
@ -151,7 +152,7 @@ class WalletRpcApi:
if args[2] is not None:
data["additional_data"] = args[2]
payloads = [create_payload_dict("state_changed", data, self.service_name, "wallet_ui")]
payloads.append(create_payload_dict("state_changed", data, self.service_name, "wallet_ui"))
if args[0] == "coin_added":
payloads.append(create_payload_dict(args[0], data, self.service_name, "metrics"))
@ -169,6 +170,15 @@ class WalletRpcApi:
if peers_close_task is not None:
await peers_close_task
async def _convert_tx_puzzle_hash(self, tx: TransactionRecord) -> TransactionRecord:
assert self.service.wallet_state_manager is not None
return dataclasses.replace(
tx,
to_puzzle_hash=(
await self.service.wallet_state_manager.convert_puzzle_hash(tx.wallet_id, tx.to_puzzle_hash)
),
)
##########################################################################################
# Key management
##########################################################################################
@ -383,7 +393,7 @@ class WalletRpcApi:
async def get_height_info(self, request: Dict):
assert self.service.wallet_state_manager is not None
height = self.service.wallet_state_manager.blockchain.get_peak_height()
height = await self.service.wallet_state_manager.blockchain.get_finished_sync_up_to()
return {"height": height}
async def get_network_info(self, request: Dict):
@ -666,7 +676,7 @@ class WalletRpcApi:
raise ValueError(f"Transaction 0x{transaction_id.hex()} not found")
return {
"transaction": tr.to_json_dict_convenience(self.service.config),
"transaction": (await self._convert_tx_puzzle_hash(tr)).to_json_dict_convenience(self.service.config),
"transaction_id": tr.name,
}
@ -680,11 +690,19 @@ class WalletRpcApi:
sort_key = request.get("sort_key", None)
reverse = request.get("reverse", False)
to_address = request.get("to_address", None)
to_puzzle_hash: Optional[bytes32] = None
if to_address is not None:
to_puzzle_hash = decode_puzzle_hash(to_address)
transactions = await self.service.wallet_state_manager.tx_store.get_transactions_between(
wallet_id, start, end, sort_key=sort_key, reverse=reverse
wallet_id, start, end, sort_key=sort_key, reverse=reverse, to_puzzle_hash=to_puzzle_hash
)
return {
"transactions": [tr.to_json_dict_convenience(self.service.config) for tr in transactions],
"transactions": [
(await self._convert_tx_puzzle_hash(tr)).to_json_dict_convenience(self.service.config)
for tr in transactions
],
"wallet_id": wallet_id,
}
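
The new to_address parameter arrives as a bech32m string and is decoded to the 32-byte puzzle hash that the tx_store filter actually compares against. A round-trip sketch of that decoding step, using a made-up puzzle hash rather than a real wallet's:

from chia.types.blockchain_format.sized_bytes import bytes32
from chia.util.bech32m import decode_puzzle_hash, encode_puzzle_hash

ph = bytes32(b"\x01" * 32)  # placeholder puzzle hash, for illustration only
address = encode_puzzle_hash(ph, "xch")
# This is the same call the handler makes on request["to_address"]
assert decode_puzzle_hash(address) == ph
print(address)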
@ -836,7 +854,7 @@ class WalletRpcApi:
memos: List[bytes] = []
if "memos" in request:
memos = [mem.encode("utf-8") for mem in request["memos"]]
if not isinstance(request["amount"], int) or not isinstance(request["amount"], int):
if not isinstance(request["amount"], int) or not isinstance(request["fee"], int):
raise ValueError("An integer amount or fee is required (too many decimals)")
amount: uint64 = uint64(request["amount"])
if "fee" in request:
@ -1331,8 +1349,8 @@ class WalletRpcApi:
uint32(request["relative_lock_height"]),
)
async with self.service.wallet_state_manager.lock:
total_fee, tx = await wallet.join_pool(new_target_state, fee)
return {"total_fee": total_fee, "transaction": tx}
total_fee, tx, fee_tx = await wallet.join_pool(new_target_state, fee)
return {"total_fee": total_fee, "transaction": tx, "fee_transaction": fee_tx}
async def pw_self_pool(self, request) -> Dict:
if self.service.wallet_state_manager is None:
@ -1348,8 +1366,8 @@ class WalletRpcApi:
raise ValueError("Wallet needs to be fully synced.")
async with self.service.wallet_state_manager.lock:
total_fee, tx = await wallet.self_pool(fee) # total_fee: uint64, tx: TransactionRecord
return {"total_fee": total_fee, "transaction": tx}
total_fee, tx, fee_tx = await wallet.self_pool(fee)
return {"total_fee": total_fee, "transaction": tx, "fee_transaction": fee_tx}
async def pw_absorb_rewards(self, request) -> Dict:
"""Perform a sweep of the p2_singleton rewards controlled by the pool wallet singleton"""
@ -1362,9 +1380,9 @@ class WalletRpcApi:
wallet: PoolWallet = self.service.wallet_state_manager.wallets[wallet_id]
async with self.service.wallet_state_manager.lock:
transaction: TransactionRecord = await wallet.claim_pool_rewards(fee)
transaction, fee_tx = await wallet.claim_pool_rewards(fee)
state: PoolWalletInfo = await wallet.get_current_state()
return {"state": state.to_json_dict(), "transaction": transaction}
return {"state": state.to_json_dict(), "transaction": transaction, "fee_transaction": fee_tx}
async def pw_status(self, request) -> Dict:
"""Return the complete state of the Pool wallet with id `request["wallet_id"]`"""

View File

@ -329,7 +329,7 @@ class WalletRpcClient(RpcClient):
async def pw_join_pool(
self, wallet_id: str, target_puzzlehash: bytes32, pool_url: str, relative_lock_height: uint32, fee: uint64
) -> TransactionRecord:
) -> Dict:
request = {
"wallet_id": int(wallet_id),
"target_puzzlehash": target_puzzlehash.hex(),
@ -338,13 +338,19 @@ class WalletRpcClient(RpcClient):
"fee": fee,
}
join_reply = await self.fetch("pw_join_pool", request)
return TransactionRecord.from_json_dict(join_reply["transaction"])
reply = await self.fetch("pw_join_pool", request)
reply["transaction"] = TransactionRecord.from_json_dict(reply["transaction"])
if reply["fee_transaction"]:
reply["fee_transaction"] = TransactionRecord.from_json_dict(reply["fee_transaction"])
return reply
async def pw_absorb_rewards(self, wallet_id: str, fee: uint64 = uint64(0)) -> TransactionRecord:
return TransactionRecord.from_json_dict(
(await self.fetch("pw_absorb_rewards", {"wallet_id": wallet_id, "fee": fee}))["transaction"]
)
async def pw_absorb_rewards(self, wallet_id: str, fee: uint64 = uint64(0)) -> Dict:
reply = await self.fetch("pw_absorb_rewards", {"wallet_id": wallet_id, "fee": fee})
reply["state"] = PoolWalletInfo.from_json_dict(reply["state"])
reply["transaction"] = TransactionRecord.from_json_dict(reply["transaction"])
if reply["fee_transaction"]:
reply["fee_transaction"] = TransactionRecord.from_json_dict(reply["fee_transaction"])
return reply
async def pw_status(self, wallet_id: str) -> Tuple[PoolWalletInfo, List[TransactionRecord]]:
json_dict = await self.fetch("pw_status", {"wallet_id": wallet_id})
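
Callers of the client now get the whole parsed reply back instead of a bare TransactionRecord. A hedged usage sketch: wallet_client is assumed to be an already-connected WalletRpcClient, and the wallet id and fee are placeholders.

from chia.rpc.wallet_rpc_client import WalletRpcClient
from chia.util.ints import uint64

async def absorb_and_report(wallet_client: WalletRpcClient) -> None:
    reply = await wallet_client.pw_absorb_rewards("2", uint64(10))
    print("pool state:", reply["state"].current)      # PoolWalletInfo, parsed above
    print("claim tx:", reply["transaction"].name)     # TransactionRecord
    if reply["fee_transaction"] is not None:          # may be None when no fee tx was created
        print("fee tx:", reply["fee_transaction"].name)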

View File

@ -3,6 +3,7 @@ import logging
import time
import traceback
import ipaddress
from collections import defaultdict
from pathlib import Path
from typing import Any, Callable, Dict, List, Optional, Tuple
@ -27,13 +28,15 @@ class Crawler:
coin_store: CoinStore
connection: aiosqlite.Connection
config: Dict
server: Any
server: Optional[ChiaServer]
crawl_store: Optional[CrawlStore]
log: logging.Logger
constants: ConsensusConstants
_shut_down: bool
root_path: Path
peer_count: int
with_peak: set
minimum_version_count: int
def __init__(
self,
@ -58,16 +61,19 @@ class Crawler:
self.version_cache: List[Tuple[str, str]] = []
self.handshake_time: Dict[str, int] = {}
self.best_timestamp_per_peer: Dict[str, int] = {}
if "crawler_db_path" in config and config["crawler_db_path"] != "":
path = Path(config["crawler_db_path"])
self.db_path = path.resolve()
else:
db_path_replaced: str = "crawler.db"
self.db_path = path_from_root(root_path, db_path_replaced)
crawler_db_path: str = config.get("crawler_db_path", "crawler.db")
self.db_path = path_from_root(root_path, crawler_db_path)
mkdir(self.db_path.parent)
self.bootstrap_peers = config["bootstrap_peers"]
self.minimum_height = config["minimum_height"]
self.other_peers_port = config["other_peers_port"]
self.versions: Dict[str, int] = defaultdict(lambda: 0)
self.minimum_version_count = self.config.get("minimum_version_count", 100)
if self.minimum_version_count < 1:
self.log.warning(
f"Crawler configuration minimum_version_count expected to be greater than zero: "
f"{self.minimum_version_count!r}"
)
def _set_state_changed_callback(self, callback: Callable):
self.state_changed_callback = callback
@ -111,9 +117,20 @@ class Crawler:
await self.crawl_store.peer_failed_to_connect(peer)
async def _start(self):
# We override the default peer_connect_timeout when running from the crawler
crawler_peer_timeout = self.config.get("peer_connect_timeout", 2)
self.server.config["peer_connect_timeout"] = crawler_peer_timeout
self.task = asyncio.create_task(self.crawl())
async def crawl(self):
# Ensure the state_changed callback is set up before moving on
# Sometimes, the daemon connection + state changed callback isn't up and ready
# by the time we get to the first _state_changed call, so this just ensures it's there before moving on
while self.state_changed_callback is None:
self.log.info("Waiting for state changed callback...")
await asyncio.sleep(0.1)
try:
self.connection = await aiosqlite.connect(self.db_path)
self.crawl_store = await CrawlStore.create(self.connection)
@ -142,6 +159,12 @@ class Crawler:
self.host_to_version, self.handshake_time = self.crawl_store.load_host_to_version()
self.best_timestamp_per_peer = self.crawl_store.load_best_peer_reliability()
self.versions = defaultdict(lambda: 0)
for host, version in self.host_to_version.items():
self.versions[version] += 1
self._state_changed("loaded_initial_peers")
while True:
self.with_peak = set()
peers_to_crawl = await self.crawl_store.get_peers_to_crawl(25000, 250000)
@ -217,11 +240,9 @@ class Crawler:
for host, timestamp in self.best_timestamp_per_peer.items()
if timestamp >= now - 5 * 24 * 3600
}
versions = {}
self.versions = defaultdict(lambda: 0)
for host, version in self.host_to_version.items():
if version not in versions:
versions[version] = 0
versions[version] += 1
self.versions[version] += 1
self.version_cache = []
self.peers_retrieved = []
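
Both the reload path and the per-batch loop now share self.versions, a defaultdict, instead of re-creating a local dict and guarding every increment. A toy version of the counting pattern with made-up hosts (collections.Counter would work equally well here):

from collections import defaultdict

host_to_version = {"peer-a": "1.3.0", "peer-b": "1.3.0", "peer-c": "1.2.11"}  # made-up data
versions: defaultdict = defaultdict(lambda: 0)
for host, version in host_to_version.items():
    versions[version] += 1  # no "if version not in versions" guard needed
print(dict(versions))  # {'1.3.0': 2, '1.2.11': 1}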
@ -263,9 +284,9 @@ class Crawler:
ipv6_addresses_count = 0
for host in self.best_timestamp_per_peer.keys():
try:
_ = ipaddress.IPv6Address(host)
ipaddress.IPv6Address(host)
ipv6_addresses_count += 1
except ValueError:
except ipaddress.AddressValueError:
continue
self.log.error(
"IPv4 addresses gossiped with timestamp in the last 5 days with respond_peers messages: "
@ -278,21 +299,17 @@ class Crawler:
ipv6_available_peers = 0
for host in self.host_to_version.keys():
try:
_ = ipaddress.IPv6Address(host)
ipaddress.IPv6Address(host)
ipv6_available_peers += 1
except ValueError:
except ipaddress.AddressValueError:
continue
self.log.error(
f"Total IPv4 nodes reachable in the last 5 days: {available_peers - ipv6_available_peers}."
)
self.log.error(f"Total IPv6 nodes reachable in the last 5 days: {ipv6_available_peers}.")
self.log.error("Version distribution among reachable in the last 5 days (at least 100 nodes):")
if "minimum_version_count" in self.config and self.config["minimum_version_count"] > 0:
minimum_version_count = self.config["minimum_version_count"]
else:
minimum_version_count = 100
for version, count in sorted(versions.items(), key=lambda kv: kv[1], reverse=True):
if count >= minimum_version_count:
for version, count in sorted(self.versions.items(), key=lambda kv: kv[1], reverse=True):
if count >= self.minimum_version_count:
self.log.error(f"Version: {version} - Count: {count}")
self.log.error(f"Banned addresses in the DB: {banned_peers}")
self.log.error(f"Temporary ignored addresses in the DB: {ignored_peers}")
@ -301,6 +318,8 @@ class Crawler:
f"{total_records - banned_peers - ignored_peers}"
)
self.log.error("***")
self._state_changed("crawl_batch_completed")
except Exception as e:
self.log.error(f"Exception: {e}. Traceback: {traceback.format_exc()}.")

View File

@ -14,7 +14,7 @@ class CrawlerAPI:
self.crawler = crawler
def _set_state_changed_callback(self, callback: Callable):
pass
self.crawler.state_changed_callback = callback
def __getattr__(self, attr_name: str):
async def invoke(*args, **kwargs):

View File

@ -4,7 +4,8 @@ import logging
import random
import signal
import traceback
from typing import Any, List
from pathlib import Path
from typing import Any, Dict, List
import aiosqlite
from dnslib import A, AAAA, SOA, NS, MX, CNAME, RR, DNSRecord, QTYPE, DNSHeader
@ -70,15 +71,15 @@ class DNSServer:
pointer: int
crawl_db: aiosqlite.Connection
def __init__(self):
def __init__(self, config: Dict, root_path: Path):
self.reliable_peers_v4 = []
self.reliable_peers_v6 = []
self.lock = asyncio.Lock()
self.pointer_v4 = 0
self.pointer_v6 = 0
db_path_replaced: str = "crawler.db"
root_path = DEFAULT_ROOT_PATH
self.db_path = path_from_root(root_path, db_path_replaced)
crawler_db_path: str = config.get("crawler_db_path", "crawler.db")
self.db_path = path_from_root(root_path, crawler_db_path)
mkdir(self.db_path.parent)
async def start(self):
@ -227,8 +228,8 @@ class DNSServer:
log.error(f"Exception: {e}. Traceback: {traceback.format_exc()}.")
async def serve_dns():
dns_server = DNSServer()
async def serve_dns(config: Dict, root_path: Path):
dns_server = DNSServer(config, root_path)
await dns_server.start()
# TODO: Make this cleaner?
@ -278,7 +279,7 @@ def main():
log.info("signal handlers unsupported")
try:
loop.run_until_complete(serve_dns())
loop.run_until_complete(serve_dns(config, root_path))
finally:
loop.close()
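
With the hardcoded DEFAULT_ROOT_PATH gone, the DNS server can be pointed at any root and config section. A hedged launch sketch: the module path and the "seeder" section name are assumptions based on the surrounding hunks, not shown in this diff.

import asyncio

from chia.seeder.dns_server import DNSServer  # assumed module path
from chia.util.config import load_config
from chia.util.default_root import DEFAULT_ROOT_PATH

config = load_config(DEFAULT_ROOT_PATH, "config.yaml", "seeder")  # section name assumed
server = DNSServer(config, DEFAULT_ROOT_PATH)
asyncio.run(server.start())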

View File

@ -5,6 +5,7 @@ from typing import Dict
from chia.consensus.constants import ConsensusConstants
from chia.consensus.default_constants import DEFAULT_CONSTANTS
from chia.rpc.crawler_rpc_api import CrawlerRpcApi
from chia.seeder.crawler import Crawler
from chia.seeder.crawler_api import CrawlerAPI
from chia.server.outbound_message import NodeType
@ -43,6 +44,9 @@ def service_kwargs_for_full_node_crawler(
network_id=network_id,
)
if config.get("crawler", {}).get("start_rpc_server", True):
kwargs["rpc_info"] = (CrawlerRpcApi, config.get("crawler", {}).get("rpc_port", 8561))
return kwargs
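
The guard above makes the crawler's RPC server opt-out via config rather than unconditional. A minimal sketch of the lookup, with a made-up config fragment:

config = {"crawler": {"start_rpc_server": False}}  # made-up config fragment
if config.get("crawler", {}).get("start_rpc_server", True):
    rpc_port = config.get("crawler", {}).get("rpc_port", 8561)
    print(f"crawler RPC enabled on port {rpc_port}")
else:
    print("crawler RPC disabled")  # kwargs["rpc_info"] is simply never set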

View File

@ -1,34 +0,0 @@
from pathlib import Path
import pkg_resources
from chia.util.config import load_config, save_config
def patch_default_seeder_config(root_path: Path, filename="config.yaml") -> None:
"""
Checks if the seeder: section exists in the config. If not, the default seeder settings are appended to the file
"""
existing_config = load_config(root_path, "config.yaml")
if "seeder" in existing_config:
print("Chia Seeder section exists in config. No action required.")
return
print("Chia Seeder section does not exist in config. Patching...")
config = load_config(root_path, "config.yaml")
# The following ignores root_path when the second param is absolute, which this will be
seeder_config = load_config(root_path, pkg_resources.resource_filename("chia.util", "initial-config.yaml"))
# Patch in the values with anchors, since pyyaml tends to change
# the anchors to things like id001, etc
config["seeder"] = seeder_config["seeder"]
config["seeder"]["network_overrides"] = config["network_overrides"]
config["seeder"]["selected_network"] = config["selected_network"]
config["seeder"]["logging"] = config["logging"]
# When running as crawler, we default to a much lower client timeout
config["full_node"]["peer_connect_timeout"] = 2
save_config(root_path, "config.yaml", config)

Some files were not shown because too many files have changed in this diff.